Bump to 0.3.0, bump deps, misc cleanups and type corrections

Some of the misc issues were found with mypy. It is not being added to the dev
deps for now, as it complains about missing types in third-party libraries and
does not honor noqa comments where we need them (non-literal TypedDict keys).
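
For context, a minimal sketch of the non-literal TypedDict key pattern referenced above (hypothetical Example/data/key names, not taken from this repository):

    from typing import TypedDict

    class Example(TypedDict, total=False):
        created_at: str
        updated_at: str

    data = Example()
    for key in ('created_at', 'updated_at'):
        # mypy reports "TypedDict key must be a string literal" here, because
        # the key is a variable - the assignment is still valid at runtime.
        data[key] = '2022-06-12'
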
Ryszard Knop 2022-06-12 19:31:25 +02:00
parent 4542057654
commit 90346f579a
7 changed files with 24 additions and 23 deletions

View File

@@ -1 +1 @@
-__version__ = '0.2.1'
+__version__ = '0.3.0'

View File

@@ -1,5 +1,6 @@
 from typing import Optional
+import requests
 from requests import Session
 from urllib3.util.retry import Retry
 from requests.adapters import HTTPAdapter
@@ -27,7 +28,7 @@ class ItchApiClient:
         self.requests.mount("https://", adapter)
         self.requests.mount("http://", adapter)
-    def get(self, endpoint: str, append_api_key: bool = True, **kwargs):
+    def get(self, endpoint: str, append_api_key: bool = True, **kwargs) -> requests.Response:
         if append_api_key:
             params = kwargs.get('data') or {}

View File

@@ -45,9 +45,9 @@ class GameMetadata(TypedDict, total=False):
     author: str
     author_url: str
-    cover_url: str
+    cover_url: Optional[str]
     screenshots: List[str]
-    description: str
+    description: Optional[str]
     rating: Dict[str, Union[float, int]]
     extra: InfoboxMetadata
@@ -125,7 +125,7 @@ class GameDownloader:
     def extract_metadata(self, game_id: int, url: str, site: BeautifulSoup) -> GameMetadata:
         rating_json: Optional[dict] = self.get_rating_json(site)
-        title = rating_json.get("name")
+        title = rating_json.get("name") if rating_json else None
         description: Optional[str] = self.get_meta(site, property="og:description")
         if not description:
@@ -150,9 +150,8 @@ class GameDownloader:
             infobox = parse_infobox(infobox_div)
             for dt in ('created_at', 'updated_at', 'released_at', 'published_at'):
                 if dt in infobox:
-                    # noinspection PyTypedDict
-                    metadata[dt] = infobox[dt].isoformat()
-                    del infobox[dt]
+                    metadata[dt] = infobox[dt].isoformat()  # noqa (non-literal TypedDict keys)
+                    del infobox[dt]  # noqa (non-literal TypedDict keys)
             if 'author' in infobox:
                 metadata['author'] = infobox['author']['author']
@@ -166,7 +165,7 @@ class GameDownloader:
             metadata['extra'] = infobox
-        agg_rating = rating_json.get('aggregateRating')
+        agg_rating = rating_json.get('aggregateRating') if rating_json else None
        if agg_rating:
            try:
                metadata['rating'] = {
@@ -269,8 +268,8 @@ class GameDownloader:
         upload_is_external = upload['storage'] == 'external'
         logging.debug("Downloading '%s' (%d), %s",
-                      file_name, upload_id,
-                      f"{file_size} bytes" if file_size is not None else "unknown size")
+                      file_name, upload_id,
+                      f"{file_size} bytes" if file_size is not None else "unknown size")
         target_path = None if upload_is_external else os.path.join(paths['files'], file_name)
@@ -286,8 +285,8 @@ class GameDownloader:
         try:
             downloaded_file_size = os.stat(target_path).st_size
-            if file_size is not None and downloaded_file_size != file_size:
-                errors.append(f"File size is {downloaded_file_size}, but expected {file_size} for upload {upload}")
+            if target_path is not None and file_size is not None and downloaded_file_size != file_size:
+                errors.append(f"File size is {downloaded_file_size}, expected {file_size} for upload {upload}")
         except FileNotFoundError:
             errors.append(f"Downloaded file not found for upload {upload}")
@@ -314,9 +313,9 @@ class GameDownloader:
             except Exception as e:
                 errors.append(f"Screenshot download failed (this is not fatal): {e}")
-        if metadata.get('cover_url'):
+        cover_url = metadata.get('cover_url')
+        if cover_url:
             try:
-                cover_url = metadata['cover_url']
                 self.download_file(cover_url, paths['cover'] + os.path.splitext(cover_url)[-1], credentials={})
             except Exception as e:
                 errors.append(f"Cover art download failed (this is not fatal): {e}")

View File

@@ -2,7 +2,7 @@ import json
 import os.path
 import logging
 import urllib.parse
-from typing import List, Optional
+from typing import List, Set, Optional
 from bs4 import BeautifulSoup
@@ -49,7 +49,7 @@ def get_jobs_for_browse_url(url: str, client: ItchApiClient) -> List[str]:
     .xml?page=N suffix and iterate until we've caught 'em all.
     """
     page = 1
-    found_urls = set()
+    found_urls: Set[str] = set()
     logging.info(f"Scraping game URLs from RSS feeds for %s", url)
     while True:
@@ -189,3 +189,5 @@ def get_jobs_for_url_or_path(path_or_url: str, settings: Settings) -> List[str]:
         return get_jobs_for_itch_url(path_or_url, client)
     elif os.path.isfile(path_or_url):
         return get_jobs_for_path(path_or_url)
+    else:
+        raise NotImplementedError(f"Cannot handle path or URL: {path_or_url}")

View File

@@ -103,7 +103,7 @@ def parse_tr(name: str, content: BeautifulSoup) -> Optional[Tuple[str, Any]]:
     raise NotImplementedError(f"Unknown infobox block name '{name}' - please file a new itch-dl issue.")
-def parse_infobox(infobox: BeautifulSoup) -> dict:
+def parse_infobox(infobox: BeautifulSoup) -> InfoboxMetadata:
     """Feed it <div class="game_info_panel_widget">, out goes a dict
     of parsed metadata blocks."""
     meta = InfoboxMetadata()
@@ -118,7 +118,6 @@ def parse_infobox(infobox: BeautifulSoup) -> dict:
         parsed_block = parse_tr(name, content_td)
         if parsed_block:
-            # noinspection PyTypedDict
-            meta[parsed_block[0]] = parsed_block[1]
+            meta[parsed_block[0]] = parsed_block[1]  # noqa (non-literal TypedDict keys)
     return meta

View File

@@ -6,7 +6,7 @@ from .api import ItchApiClient
 def get_download_keys(client: ItchApiClient) -> Dict[int, str]:
     logging.info("Fetching all download keys...")
-    download_keys = {}
+    download_keys: Dict[int, str] = {}
     page = 1
     while True:

View File

@@ -1,7 +1,7 @@
 [tool.poetry]
 name = "itch-dl"
 packages = [{ include = "itch_dl" }]
-version = "0.2.1"
+version = "0.3.0"
 description = "itch.io bulk game downloader"
 homepage = "https://github.com/DragoonAethis/itch-dl"
 repository = "https://github.com/DragoonAethis/itch-dl"
@@ -26,7 +26,7 @@ classifiers = [
 python = "^3.8"
 tqdm = "^4.64.0"
 urllib3 = "^1.26.9"
-requests = "^2.27.1"
+requests = "^2.28.0"
 python-slugify = "^6.1.2"
 beautifulsoup4 = "^4.11.1"
 lxml = "^4.9.0"