Merge pull request #221 from gfyoung/bad-200-type-urlretrieve
Clean up temp files before urlretrieve
commit f5d0497961
1 changed file with 5 additions and 2 deletions
@@ -19,9 +19,9 @@ import shutil
 import fnmatch
 from datetime import datetime
 try:
-    from urllib.request import FancyURLopener
+    from urllib.request import FancyURLopener, urlcleanup
 except ImportError:
-    from urllib import FancyURLopener
+    from urllib import FancyURLopener, urlcleanup
 
 curdir = dirname(__file__)
 sys.path.insert(0, join(curdir, "tools", "external"))
@@ -410,6 +410,9 @@ class Recipe(object):
         if exists(filename):
             unlink(filename)
 
+        # Clean up temporary files just in case before downloading.
+        urlcleanup()
+
         print('Downloading {0}'.format(url))
         urlretrieve(url, filename, report_hook)
         return filename
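For context, a minimal sketch of the pattern this change applies (not the project's actual download code): urlcleanup() removes any cached temporary files left behind by earlier urlretrieve() calls, so a fresh download does not pick up a stale or partially written file. The fetch_url helper name and the omitted report_hook callback are illustrative assumptions; the try/except import mirrors the Python 2/3 compatibility shim in the diff above.

try:
    # Python 3: the legacy download helpers live in urllib.request.
    from urllib.request import urlretrieve, urlcleanup
except ImportError:
    # Python 2: the same helpers live in urllib.
    from urllib import urlretrieve, urlcleanup


def fetch_url(url, filename):
    # Drop any temporary files cached by previous urlretrieve() calls
    # (e.g. from interrupted downloads) before fetching the new file.
    urlcleanup()
    print('Downloading {0}'.format(url))
    urlretrieve(url, filename)
    return filename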