forked from Mirrors/humblebundle-downloader
Added CI for releasing new versions to PyPI
This commit is contained in:
2
humblebundle_downloader/__init__.py
Normal file
2
humblebundle_downloader/__init__.py
Normal file
@@ -0,0 +1,2 @@
|
||||
from .generate_cookie import generate_cookie # noqa: F401
|
||||
from .download_library import download_library # noqa: F401
|
||||
53
humblebundle_downloader/cli.py
Normal file
53
humblebundle_downloader/cli.py
Normal file
@@ -0,0 +1,53 @@
|
||||
import os
|
||||
import logging
|
||||
import argparse
|
||||
|
||||
# Module-level logger for the CLI entry point.
logger = logging.getLogger(__name__)

# Verbosity is overridable via the HBD_LOGLEVEL environment variable
# (defaults to INFO); messages are printed bare, with no level/timestamp prefix.
LOG_LEVEL = os.environ.get('HBD_LOGLEVEL', 'INFO').upper()
logging.basicConfig(level=LOG_LEVEL, format='%(message)s')
|
||||
|
||||
|
||||
def cli():
    """Parse command-line arguments and dispatch to the selected action.

    Two subcommands are exposed: ``gen-cookie`` (save a login cookie) and
    ``download`` (download the library).  The heavy imports happen lazily
    inside each branch so the unused action's dependencies are never loaded.
    """
    parser = argparse.ArgumentParser()
    actions = parser.add_subparsers(dest='action')
    actions.required = True

    ###
    # Generate cookie
    ###
    gencookie_parser = actions.add_parser(
        'gen-cookie',
        help="Generate cookie used to access your library",
    )
    gencookie_parser.add_argument(
        '-c', '--cookie-file', type=str, default='hbd-cookies.txt',
        help='Location of the file to store the cookie',
    )

    ###
    # Download Library
    ###
    # TODO: for download: have option to only get types, ebooks, videos, etc do not enforce,
    # but lower and just string match to the type in the api
    download_parser = actions.add_parser(
        'download',
        help="Download content in your humble bundle library",
    )
    download_parser.add_argument(
        '-c', '--cookie-file', type=str, default='hbd-cookies.txt',
        help='Location of the file to store the cookie',
    )
    download_parser.add_argument(
        '-l', '--library-path', type=str, required=True,
        help='Folder to download all content to',
    )
    download_parser.add_argument(
        '--progress', action='store_true',
        help="Display progress bar for downloads",
    )

    args = parser.parse_args()

    if args.action == 'gen-cookie':
        from . import generate_cookie
        generate_cookie(args.cookie_file)
    elif args.action == 'download':
        from . import download_library
        download_library(args.cookie_file, args.library_path,
                         progress_bar=args.progress)
|
||||
90
humblebundle_downloader/download_library.py
Normal file
90
humblebundle_downloader/download_library.py
Normal file
@@ -0,0 +1,90 @@
|
||||
import os
|
||||
import json
|
||||
import parsel
|
||||
import logging
|
||||
import requests
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _clean_name(dirty_str):
|
||||
allowed_chars = (' ', '_', '.', '-', ':', '[', ']')
|
||||
return "".join([c for c in dirty_str.replace('+', '_') if c.isalpha() or c.isdigit() or c in allowed_chars]).strip()
|
||||
|
||||
|
||||
def download_library(cookie_path, library_path, progress_bar=False):
    """Download every product in the Humble Bundle library to *library_path*.

    Args:
        cookie_path: Path to a file containing the raw cookie header string
            (as written by ``generate_cookie``); it is sent verbatim as the
            ``cookie`` HTTP header.
        library_path: Root folder; files are saved under
            ``<library_path>/<bundle>/<item>/``.
        progress_bar: When True, print an in-place textual progress bar
            instead of one log line per file.

    A per-file metadata cache (``.cache.json`` inside *library_path*) records
    checksums and cache headers of each downloaded file; files whose metadata
    is unchanged are skipped on later runs.
    """
    # Load the saved cookie header.
    with open(cookie_path, 'r') as f:
        account_cookies = f.read()

    # Make sure the root exists up front so the cache file can always be
    # written, even if no item folder ends up being created this run.
    os.makedirs(library_path, exist_ok=True)

    cache_file = os.path.join(library_path, '.cache.json')
    try:
        with open(cache_file, 'r') as f:
            cache_data = json.load(f)
    except FileNotFoundError:
        cache_data = {}

    library_r = requests.get('https://www.humblebundle.com/home/library',
                             headers={'cookie': account_cookies})
    logger.debug(f"Library request: {library_r}")
    library_page = parsel.Selector(text=library_r.text)

    # The library page embeds a JSON blob whose 'gamekeys' lists order ids.
    orders_json = json.loads(
        library_page.css('#user-home-json-data').xpath('string()').extract_first())
    for order_id in orders_json['gamekeys']:
        order_r = requests.get(f'https://www.humblebundle.com/api/v1/order/{order_id}?all_tpkds=true',
                               headers={'cookie': account_cookies})
        logger.debug(f"Order request: {order_r}")
        order = order_r.json()
        bundle_title = _clean_name(order['product']['human_name'])
        logger.info(f"Checking bundle: {bundle_title}")
        for item in order['subproducts']:
            item_title = _clean_name(item['human_name'])
            # Get all types of download for a product
            for download_type in item['downloads']:
                # platform = download_type['platform']  # Type of product, ebook, videos, etc...
                item_folder = os.path.join(library_path, bundle_title, item_title)

                # exist_ok=True instead of swallowing OSError, so real
                # failures (e.g. permissions) still surface.
                os.makedirs(item_folder, exist_ok=True)

                # Download each file type of a product
                for file_type in download_type['download_struct']:
                    url = file_type['url']['web']
                    ext = url.split('?')[0].split('.')[-1]
                    filename = os.path.join(item_folder, f"{item_title}.{ext}")
                    item_r = requests.get(url, stream=True)
                    logger.debug(f"Item request: {item_r}, Url: {url}")
                    # Not sure which value will be best to use, so save them all for now.
                    # .get() keeps one missing header/key from aborting the whole run.
                    etag = item_r.headers.get('ETag', '""')
                    file_info = {
                        'md5': file_type.get('md5'),
                        'sha1': file_type.get('sha1'),
                        'url_last_modified': item_r.headers.get('Last-Modified'),
                        'url_etag': etag[1:-1],  # strip surrounding quotes
                        'url_crc': item_r.headers.get('X-HW-Cache-CRC'),
                    }
                    if file_info == cache_data.get(filename, {}):
                        # Cached copy is current: close the streamed response
                        # so its connection is released instead of leaked.
                        item_r.close()
                        continue

                    if not progress_bar:
                        logger.info(f"Downloading: {item_title}.{ext}")

                    pb_width = 50  # progress bar width, loop-invariant
                    with open(filename, 'wb') as outfile:
                        total_length = item_r.headers.get('content-length')
                        if total_length is None:  # no content length header
                            outfile.write(item_r.content)
                        else:
                            dl = 0
                            total_length = int(total_length)
                            for data in item_r.iter_content(chunk_size=4096):
                                dl += len(data)
                                outfile.write(data)
                                done = int(pb_width * dl / total_length)
                                if progress_bar:
                                    print(f"Downloading: {item_title}.{ext}: "
                                          f"{int(done * (100 / pb_width))}% "
                                          f"[{'=' * done}{' ' * (pb_width - done)}]",
                                          end='\r')

                    if progress_bar:
                        print()  # print new line so next progress bar is on its own line

                    cache_data[filename] = file_info
                    # Update cache file with newest data so if the script
                    # quits it can keep track of the progress.
                    with open(cache_file, 'w') as outfile:
                        json.dump(cache_data, outfile, sort_keys=True, indent=4)
|
||||
38
humblebundle_downloader/generate_cookie.py
Normal file
38
humblebundle_downloader/generate_cookie.py
Normal file
@@ -0,0 +1,38 @@
|
||||
import time
|
||||
import logging
|
||||
from selenium import webdriver
|
||||
from webdriverdownloader import ChromeDriverDownloader
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _get_cookie_str(driver):
|
||||
raw_cookies = driver.get_cookies()
|
||||
baked_cookies = ''
|
||||
for cookie in raw_cookies:
|
||||
baked_cookies += f"{cookie['name']}={cookie['value']};"
|
||||
# Remove the trailing ;
|
||||
return baked_cookies[:-1]
|
||||
|
||||
|
||||
def generate_cookie(cookie_path):
    """Open a Chrome window for the user to log in, then save the session cookie.

    Downloads/installs a matching chromedriver, opens the Humble Bundle login
    page, polls until the browser leaves the ``/login`` URL (i.e. the user has
    finished logging in), then writes the cookie header string to
    *cookie_path*.

    Args:
        cookie_path: File to write the serialized cookie string to.
    """
    gdd = ChromeDriverDownloader()
    chrome_driver = gdd.download_and_install()

    # TODO: load previous cookies so it does not ask to re verify using an email code each time
    driver = webdriver.Chrome(executable_path=chrome_driver[1])
    try:
        driver.get('https://www.humblebundle.com/login')

        while '/login' in driver.current_url:
            # Waiting for the user to login
            time.sleep(.25)

        cookie_str = _get_cookie_str(driver)
        with open(cookie_path, 'w') as f:
            f.write(cookie_str)

        logger.info(f"Saved cookie to {cookie_path}")
    finally:
        # Always shut the browser down, even if the user aborts (Ctrl-C) or
        # any step above raises — otherwise the Chrome process leaks.
        driver.quit()
|
||||
Reference in New Issue
Block a user