Mirror of https://github.com/xtream1101/humblebundle-downloader.git (synced 2024-11-20 08:49:17 +01:00)
flake8 clean up
This commit is contained in:
parent e6817cc1de
commit 6fa1a54228
@@ -3,6 +3,7 @@ image: python:3
 stages:
   - test
   - release
+  - tag
 
 flake8:
   stage: test
@@ -27,7 +28,7 @@ release-package:
     git remote set-url origin "https://gitlab-ci-token:${CI_TAG_UPLOAD_TOKEN}@${url_host}"
 
 tag:
-  stage: release
+  stage: tag
   only:
     - master
   script:
@@ -19,27 +19,40 @@ def cli():
     ###
     # Generate cookie
     ###
-    parser_gencookie = subparsers.add_parser('gen-cookie',
-                                             help="Generate cookie used to access your library")
-    parser_gencookie.add_argument('-c', '--cookie-file', type=str,
-                                  help='Location of the file to store the cookie',
-                                  default='hbd-cookies.txt')
+    parser_gencookie = subparsers.add_parser(
+        'gen-cookie',
+        help="Generate cookie used to access your library",
+    )
+    parser_gencookie.add_argument(
+        '-c', '--cookie-file', type=str,
+        help="Location of the file to store the cookie",
+        default="hbd-cookies.txt",
+    )
 
     ###
     # Download Library
     ###
-    # TODO: for download: have option to only get types, ebooks, videos, etc do not enforce,
+    # TODO: have option to only get types, ebooks, videos, etc do not enforce,
     # but lower and just string match to the type in the api
-    parser_download = subparsers.add_parser('download',
-                                            help="Download content in your humble bundle library")
-    parser_download.add_argument('-c', '--cookie-file', type=str,
-                                 help='Location of the file to store the cookie',
-                                 default='hbd-cookies.txt')
-    parser_download.add_argument('-l', '--library-path', type=str,
-                                 help='Folder to download all content to',
-                                 required=True)
-    parser_download.add_argument('--progress', action='store_true',
-                                 help="Display progress bar for downloads")
+    parser_download = subparsers.add_parser(
+        'download',
+        help="Download content in your humble bundle library",
+    )
+    parser_download.add_argument(
+        '-c', '--cookie-file', type=str,
+        help="Location of the file to store the cookie",
+        default="hbd-cookies.txt",
+    )
+    parser_download.add_argument(
+        '-l', '--library-path', type=str,
+        help="Folder to download all content to",
+        required=True,
+    )
+    parser_download.add_argument(
+        '--progress',
+        action='store_true',
+        help="Display progress bar for downloads",
+    )
 
     cli_args = parser.parse_args()
 
@@ -49,5 +62,8 @@ def cli():
 
     elif cli_args.action == 'download':
         from . import download_library
-        download_library(cli_args.cookie_file, cli_args.library_path,
-                         progress_bar=cli_args.progress)
+        download_library(
+            cli_args.cookie_file,
+            cli_args.library_path,
+            progress_bar=cli_args.progress
+        )
@@ -9,7 +9,12 @@ logger = logging.getLogger(__name__)
 
 def _clean_name(dirty_str):
     allowed_chars = (' ', '_', '.', '-', ':', '[', ']')
-    return "".join([c for c in dirty_str.replace('+', '_') if c.isalpha() or c.isdigit() or c in allowed_chars]).strip()
+    clean = []
+    for c in dirty_str.replace('+', '_'):
+        if c.isalpha() or c.isdigit() or c in allowed_chars:
+            clean.append(c)
+
+    return "".join(clean).strip()
 
 
 def download_library(cookie_path, library_path, progress_bar=False):
@ -29,9 +34,12 @@ def download_library(cookie_path, library_path, progress_bar=False):
|
|||||||
headers={'cookie': account_cookies})
|
headers={'cookie': account_cookies})
|
||||||
logger.debug(f"Library request: {library_r}")
|
logger.debug(f"Library request: {library_r}")
|
||||||
library_page = parsel.Selector(text=library_r.text)
|
library_page = parsel.Selector(text=library_r.text)
|
||||||
|
orders_json = json.loads(library_page.css('#user-home-json-data')
|
||||||
|
.xpath('string()').extract_first())
|
||||||
|
|
||||||
for order_id in json.loads(library_page.css('#user-home-json-data').xpath('string()').extract_first())['gamekeys']:
|
for order_id in orders_json['gamekeys']:
|
||||||
order_r = requests.get(f'https://www.humblebundle.com/api/v1/order/{order_id}?all_tpkds=true',
|
order_url = f'https://www.humblebundle.com/api/v1/order/{order_id}?all_tpkds=true' # noqa: E501
|
||||||
|
order_r = requests.get(order_url,
|
||||||
headers={'cookie': account_cookies})
|
headers={'cookie': account_cookies})
|
||||||
logger.debug(f"Order request: {order_r}")
|
logger.debug(f"Order request: {order_r}")
|
||||||
order = order_r.json()
|
order = order_r.json()
|
||||||
@@ -41,8 +49,11 @@ def download_library(cookie_path, library_path, progress_bar=False):
             item_title = _clean_name(item['human_name'])
             # Get all types of download for a product
             for download_type in item['downloads']:
-                # platform = download_type['platform']  # Type of product, ebook, videos, etc...
-                item_folder = os.path.join(library_path, bundle_title, item_title)
+                # Type of product, ebook, videos, etc...
+                # platform = download_type['platform']
+                item_folder = os.path.join(
+                    library_path, bundle_title, item_title
+                )
 
                 # Create directory to save the files to
                 try: os.makedirs(item_folder)  # noqa: E701
@@ -55,7 +66,7 @@ def download_library(cookie_path, library_path, progress_bar=False):
                     filename = os.path.join(item_folder, f"{item_title}.{ext}")
                     item_r = requests.get(url, stream=True)
                     logger.debug(f"Item request: {item_r}, Url: {url}")
-                    # Not sure which value will be best to use, so save them all for now
+                    # Not sure which value will be best to use, so use them all
                     file_info = {
                         'md5': file_type['md5'],
                         'sha1': file_type['sha1'],
@@ -74,17 +85,24 @@ def download_library(cookie_path, library_path, progress_bar=False):
                         else:
                             dl = 0
                             total_length = int(total_length)
-                            for data in item_r.iter_content(chunk_size=4096):
+                            for data in item_r.iter_content(chunk_size=4096):  # noqa E501
                                 dl += len(data)
                                 outfile.write(data)
                                 pb_width = 50
                                 done = int(pb_width * dl / total_length)
-                                if progress_bar: print(f"Downloading: {item_title}.{ext}: {int(done * (100 / pb_width))}% [{'=' * done}{' ' * (pb_width-done)}]", end='\r')  # noqa: E501, E701
+                                if progress_bar:
+                                    print(f"Downloading: {item_title}.{ext}: {int(done * (100 / pb_width))}% [{'=' * done}{' ' * (pb_width-done)}]", end='\r')  # noqa: E501, E701
 
                     if progress_bar:
-                        print()  # print new line so next progress bar is on its own line
+                        # print new line so next progress bar
+                        # is on its own line
+                        print()
 
                     cache_data[filename] = file_info
-                    # Update cache file with newest data so if the script quits it can keep track of the progress
+                    # Update cache file with newest data so if the script
+                    # quits it can keep track of the progress
                     with open(cache_file, 'w') as outfile:
-                        json.dump(cache_data, outfile, sort_keys=True, indent=4)
+                        json.dump(
+                            cache_data, outfile,
+                            sort_keys=True, indent=4,
+                        )
@@ -20,7 +20,8 @@ def generate_cookie(cookie_path):
     gdd = ChromeDriverDownloader()
     chrome_driver = gdd.download_and_install()
 
-    # TODO: load previous cookies so it does not ask to re verify using an email code each time
+    # TODO: load previous cookies so it does not ask to re verify using an
+    # email code each time
     driver = webdriver.Chrome(executable_path=chrome_driver[1])
 
     driver.get('https://www.humblebundle.com/login')