Clean up temp files before urlretrieve

This commit is contained in:
gfyoung 2016-11-06 03:30:58 -05:00
parent c7624becbd
commit 0cb7a8ff10

View file

@@ -19,9 +19,9 @@ import shutil
 import fnmatch
 from datetime import datetime
 try:
-    from urllib.request import FancyURLopener
+    from urllib.request import FancyURLopener, urlcleanup
 except ImportError:
-    from urllib import FancyURLopener
+    from urllib import FancyURLopener, urlcleanup

 curdir = dirname(__file__)
 sys.path.insert(0, join(curdir, "tools", "external"))
@@ -410,6 +410,9 @@ class Recipe(object):
         if exists(filename):
             unlink(filename)
+        # Clean up temporary files just in case before downloading.
+        urlcleanup()
         print('Downloading {0}'.format(url))
         urlretrieve(url, filename, report_hook)
         return filename