Actually fix the re-download bug

This commit is contained in:
Eddy Hintze 2020-01-25 20:36:04 -05:00
parent 92440de1f7
commit 925a68dae9
3 changed files with 10 additions and 3 deletions

View File

@ -1,6 +1,11 @@
 # Change log
+### 0.1.3
+- Fixed re-downloading for real this time
+- Only use the url last modified time as the check for new versions
 ### 0.1.2
 - Stop using md5 & sha1 hashes to check if file is unique (was creating duplicate downloads of the same file)
 - Strip periods from end of directory & file names

View File

@ -1 +1 @@
-__version__ = '0.1.2'
+__version__ = '0.1.3'

View File

@ -101,8 +101,6 @@ class DownloadLibrary:
         # Not sure which value will be best to use, so use them all
         file_info = {
             'url_last_modified': product_r.headers['Last-Modified'],
-            'url_etag': product_r.headers['ETag'][1:-1],
-            'url_crc': product_r.headers['X-HW-Cache-CRC'],
         }
         cache_file_info = self.cache_data.get(cache_file_key, {})
         if file_info != cache_file_info:
@ -199,6 +197,10 @@ class DownloadLibrary:
                 del cache_data[key]['md5']
             if 'sha1' in value:
                 del cache_data[key]['sha1']
+            if 'url_etag' in value:
+                del cache_data[key]['url_etag']
+            if 'url_crc' in value:
+                del cache_data[key]['url_crc']
         return cache_data