Mirror of https://github.com/release-engineering/repo-autoindex.git (synced 2025-02-23 13:42:52 +00:00)
Make it all pass mypy
parent: 05c6bd14be
commit: 119f0ea9b6
10 changed files with 116 additions and 25 deletions
poetry.lock (generated, 69 lines changed)
@@ -140,6 +140,32 @@ category = "main"
 optional = false
 python-versions = ">=3.7"

+[[package]]
+name = "mypy"
+version = "0.961"
+description = "Optional static typing for Python"
+category = "dev"
+optional = false
+python-versions = ">=3.6"
+
+[package.dependencies]
+mypy-extensions = ">=0.4.3"
+tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
+typing-extensions = ">=3.10"
+
+[package.extras]
+dmypy = ["psutil (>=4.0)"]
+python2 = ["typed-ast (>=1.4.0,<2)"]
+reports = ["lxml"]
+
+[[package]]
+name = "mypy-extensions"
+version = "0.4.3"
+description = "Experimental type system extensions for programs checked with the mypy typechecker."
+category = "dev"
+optional = false
+python-versions = "*"
+
 [[package]]
 name = "packaging"
 version = "21.3"
@@ -211,6 +237,14 @@ category = "dev"
 optional = false
 python-versions = ">=3.7"

+[[package]]
+name = "typing-extensions"
+version = "4.2.0"
+description = "Backported and Experimental Type Hints for Python 3.7+"
+category = "dev"
+optional = false
+python-versions = ">=3.7"
+
 [[package]]
 name = "yarl"
 version = "1.7.2"
@@ -226,7 +260,7 @@ multidict = ">=4.0"
 [metadata]
 lock-version = "1.1"
 python-versions = ">=3.9,<4"
-content-hash = "de28df6c78aea6f3baa2be842fd72f68fff8bd1e3949a7de27fddc90e28e7a20"
+content-hash = "ec537cb22f7488971d12450524aec30918e6d42c6eac78fcd4022fe8293d935a"

 [metadata.files]
 aiohttp = [
@@ -507,6 +541,35 @@ multidict = [
     {file = "multidict-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:4bae31803d708f6f15fd98be6a6ac0b6958fcf68fda3c77a048a4f9073704aae"},
     {file = "multidict-6.0.2.tar.gz", hash = "sha256:5ff3bd75f38e4c43f1f470f2df7a4d430b821c4ce22be384e1459cb57d6bb013"},
 ]
+mypy = [
+    {file = "mypy-0.961-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:697540876638ce349b01b6786bc6094ccdaba88af446a9abb967293ce6eaa2b0"},
+    {file = "mypy-0.961-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b117650592e1782819829605a193360a08aa99f1fc23d1d71e1a75a142dc7e15"},
+    {file = "mypy-0.961-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bdd5ca340beffb8c44cb9dc26697628d1b88c6bddf5c2f6eb308c46f269bb6f3"},
+    {file = "mypy-0.961-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3e09f1f983a71d0672bbc97ae33ee3709d10c779beb613febc36805a6e28bb4e"},
+    {file = "mypy-0.961-cp310-cp310-win_amd64.whl", hash = "sha256:e999229b9f3198c0c880d5e269f9f8129c8862451ce53a011326cad38b9ccd24"},
+    {file = "mypy-0.961-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b24be97351084b11582fef18d79004b3e4db572219deee0212078f7cf6352723"},
+    {file = "mypy-0.961-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f4a21d01fc0ba4e31d82f0fff195682e29f9401a8bdb7173891070eb260aeb3b"},
+    {file = "mypy-0.961-cp36-cp36m-win_amd64.whl", hash = "sha256:439c726a3b3da7ca84a0199a8ab444cd8896d95012c4a6c4a0d808e3147abf5d"},
+    {file = "mypy-0.961-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5a0b53747f713f490affdceef835d8f0cb7285187a6a44c33821b6d1f46ed813"},
+    {file = "mypy-0.961-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0e9f70df36405c25cc530a86eeda1e0867863d9471fe76d1273c783df3d35c2e"},
+    {file = "mypy-0.961-cp37-cp37m-win_amd64.whl", hash = "sha256:b88f784e9e35dcaa075519096dc947a388319cb86811b6af621e3523980f1c8a"},
+    {file = "mypy-0.961-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d5aaf1edaa7692490f72bdb9fbd941fbf2e201713523bdb3f4038be0af8846c6"},
+    {file = "mypy-0.961-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9f5f5a74085d9a81a1f9c78081d60a0040c3efb3f28e5c9912b900adf59a16e6"},
+    {file = "mypy-0.961-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f4b794db44168a4fc886e3450201365c9526a522c46ba089b55e1f11c163750d"},
+    {file = "mypy-0.961-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:64759a273d590040a592e0f4186539858c948302c653c2eac840c7a3cd29e51b"},
+    {file = "mypy-0.961-cp38-cp38-win_amd64.whl", hash = "sha256:63e85a03770ebf403291ec50097954cc5caf2a9205c888ce3a61bd3f82e17569"},
+    {file = "mypy-0.961-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5f1332964963d4832a94bebc10f13d3279be3ce8f6c64da563d6ee6e2eeda932"},
+    {file = "mypy-0.961-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:006be38474216b833eca29ff6b73e143386f352e10e9c2fbe76aa8549e5554f5"},
+    {file = "mypy-0.961-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9940e6916ed9371809b35b2154baf1f684acba935cd09928952310fbddaba648"},
+    {file = "mypy-0.961-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a5ea0875a049de1b63b972456542f04643daf320d27dc592d7c3d9cd5d9bf950"},
+    {file = "mypy-0.961-cp39-cp39-win_amd64.whl", hash = "sha256:1ece702f29270ec6af25db8cf6185c04c02311c6bb21a69f423d40e527b75c56"},
+    {file = "mypy-0.961-py3-none-any.whl", hash = "sha256:03c6cc893e7563e7b2949b969e63f02c000b32502a1b4d1314cabe391aa87d66"},
+    {file = "mypy-0.961.tar.gz", hash = "sha256:f730d56cb924d371c26b8eaddeea3cc07d78ff51c521c6d04899ac6904b75492"},
+]
+mypy-extensions = [
+    {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
+    {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
+]
 packaging = [
     {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
     {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
@@ -531,6 +594,10 @@ tomli = [
     {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
     {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
 ]
+typing-extensions = [
+    {file = "typing_extensions-4.2.0-py3-none-any.whl", hash = "sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708"},
+    {file = "typing_extensions-4.2.0.tar.gz", hash = "sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376"},
+]
 yarl = [
     {file = "yarl-1.7.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f2a8508f7350512434e41065684076f640ecce176d262a7d54f0da41d99c5a95"},
     {file = "yarl-1.7.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da6df107b9ccfe52d3a48165e48d72db0eca3e3029b5b8cb4fe6ee3cb870ba8b"},
@@ -13,6 +13,7 @@ Jinja2 = ">=3.1.2"

 [tool.poetry.dev-dependencies]
 pytest = ">=7.1.2"
+mypy = ">=0.961"

 [tool.poetry.scripts]
 "repo-autoindex" = "repo_autoindex.cmd:entrypoint"
@@ -1,2 +1,4 @@
 from .api import autoindex
 from .base import Fetcher, GeneratedIndex
+
+__all__ = ["autoindex", "Fetcher", "GeneratedIndex"]
@@ -1,6 +1,7 @@
 import gzip
 import logging
 from collections.abc import AsyncGenerator
+from typing import Optional

 import aiohttp

@@ -35,7 +36,7 @@ def http_fetcher(session: aiohttp.ClientSession) -> Fetcher:
 async def autoindex(
     url: str,
     *,
-    fetcher: Fetcher = None,
+    fetcher: Optional[Fetcher] = None,
     index_href_suffix: str = "",
 ) -> AsyncGenerator[GeneratedIndex, None]:
     if fetcher is None:
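A note on the hunk above: mypy, under --strict or --no-implicit-optional (and by default in releases newer than the 0.961 pinned here), rejects a None default on a parameter whose annotation is not optional, so the signature has to say Optional[Fetcher] explicitly. A minimal runnable sketch of the same pattern, using a simplified synchronous stand-in alias rather than the project's real async Fetcher:

from typing import Callable, Optional

Fetcher = Callable[[str], str]  # stand-in alias; the real Fetcher is async


def default_fetcher(url: str) -> str:
    return "<content of %s>" % url


def fetch(url: str, fetcher: Optional[Fetcher] = None) -> str:
    # "fetcher: Fetcher = None" is rejected under no-implicit-optional;
    # Optional[Fetcher] (or "Fetcher | None" on 3.10+) makes None explicit.
    if fetcher is None:
        fetcher = default_fetcher
    return fetcher(url)


print(fetch("https://example.com/repo"))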
@@ -46,7 +46,7 @@ class Repo(ABC):
         self.fetcher = fetcher

     @abstractmethod
-    async def render_index(
+    def render_index(
         self, index_href_suffix: str
     ) -> AsyncGenerator[GeneratedIndex, None]:
         pass
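The async def to def change on the abstract render_index above is a typing subtlety rather than a behavior change: a function only becomes an async generator if its body contains yield, so an async def stub with pass is typed as a coroutine that returns an AsyncGenerator, and the concrete async-generator overrides then look incompatible to mypy. Declaring the abstract method as a plain def returning AsyncGenerator matches what the implementations really are. A small self-contained sketch with illustrative names:

import asyncio
from abc import ABC, abstractmethod
from collections.abc import AsyncGenerator


class Base(ABC):
    @abstractmethod
    def items(self) -> AsyncGenerator[int, None]:
        # Plain "def": declared as returning an async generator, which is
        # exactly what the concrete method below is.
        ...


class Impl(Base):
    async def items(self) -> AsyncGenerator[int, None]:
        # The yield makes this an async generator function; calling items()
        # returns AsyncGenerator[int, None], a compatible override.
        yield 1
        yield 2


async def main() -> None:
    async for value in Impl().items():
        print(value)


asyncio.run(main())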
@@ -3,14 +3,13 @@ import asyncio
 import gzip
 import logging
 import os
-from collections.abc import AsyncGenerator

 from repo_autoindex import autoindex

 LOG = logging.getLogger("autoindex")


-async def dump_autoindices(args):
+async def dump_autoindices(args: argparse.Namespace) -> None:
     index_filename = args.index_filename
     async for index in autoindex(args.url, index_href_suffix=index_filename):
         os.makedirs(index.relative_dir or ".", exist_ok=True)
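Worth noting why the annotations on dump_autoindices matter: by default mypy skips the bodies of unannotated functions, so args: argparse.Namespace and -> None are what opt this coroutine into checking (and the now-unused AsyncGenerator import can go). A rough, self-contained sketch of the same shape, with a hypothetical url argument standing in for the real CLI:

import argparse
import asyncio


async def run(args: argparse.Namespace) -> None:
    # Because the signature is annotated, mypy checks this body; an
    # unannotated "async def run(args):" would be skipped unless
    # --check-untyped-defs is enabled.
    print("would generate indexes for", args.url)


def main() -> None:
    parser = argparse.ArgumentParser()
    parser.add_argument("url")
    asyncio.run(run(parser.parse_args()))


if __name__ == "__main__":
    main()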
@@ -10,7 +10,7 @@ TEMPLATE_DIR = os.path.join(os.path.dirname(__file__), "templates")


 class TemplateContext:
-    def __init__(self):
+    def __init__(self) -> None:
         self.env = jinja2.Environment(
             autoescape=True, loader=jinja2.FileSystemLoader(TEMPLATE_DIR)
         )
@@ -1,4 +1,5 @@
 from dataclasses import dataclass, field, replace
+from collections.abc import Iterable

 from .base import ICON_FOLDER, IndexEntry

@@ -11,7 +12,9 @@ class TreeNode:


 def treeify(
-    all_entries: list[IndexEntry], relative_dir: str = "", index_href_suffix: str = ""
+    all_entries: Iterable[IndexEntry],
+    relative_dir: str = "",
+    index_href_suffix: str = "",
 ) -> TreeNode:
     out = TreeNode(relative_dir=relative_dir)

@@ -24,7 +27,7 @@ def treeify(
         )
     )

-    entries_by_leading_dir = {}
+    entries_by_leading_dir: dict[str, list[IndexEntry]] = {}
     for entry in all_entries:
         subdir = entry.href.removeprefix(relative_dir + "/")
         components = subdir.split("/", 1)
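Two routine mypy adjustments appear in the treeify hunks above: the parameter is widened from list[IndexEntry] to Iterable[IndexEntry], presumably because callers pass iterables that are not lists and the function only needs to iterate, and the empty dict gets an explicit annotation, since mypy cannot infer key and value types from {} alone. The same two points in a tiny standalone sketch:

from collections.abc import Iterable


def length_index(names: Iterable[str]) -> dict[str, int]:
    # Iterable accepts lists, tuples, generators, dict views, and so on.
    out: dict[str, int] = {}  # a bare "out = {}" makes mypy ask for an annotation
    for name in names:
        out[name] = len(name)
    return out


print(length_index(["a", "bb"]))
print(length_index(n * "x" for n in range(1, 4)))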
@@ -7,16 +7,28 @@ from typing import Optional, Type
 from xml.dom.minidom import Element
 from xml.dom.pulldom import END_ELEMENT, START_ELEMENT, DOMEventStream

-from defusedxml import pulldom
+from defusedxml import pulldom  # type: ignore

-from .base import (ICON_FOLDER, ICON_PACKAGE, Fetcher, GeneratedIndex,
-                   IndexEntry, Repo)
+from .base import ICON_FOLDER, ICON_PACKAGE, Fetcher, GeneratedIndex, IndexEntry, Repo
 from .template import TemplateContext
 from .tree import treeify

 LOG = logging.getLogger("autoindex")


+def get_tag(elem: Element, name: str) -> Element:
+    elems: list[Element] = elem.getElementsByTagName(name)  # type: ignore
+    return elems[0]
+
+
+def get_text_tag(elem: Element, name: str) -> str:
+    tagnode = get_tag(elem, name)
+    child = tagnode.firstChild
+    # TODO: raise proper error if missing
+    assert child
+    return str(child.toxml())
+
+
 @dataclass
 class Package:
     href: str
@@ -26,10 +38,10 @@ class Package:
     @classmethod
     def from_element(cls, elem: Element) -> "Package":
         return cls(
-            href=elem.getElementsByTagName("location")[0].attributes["href"].value,
+            href=get_tag(elem, "location").attributes["href"].value,
             # TODO: tolerate some of these being absent or wrong.
-            time=elem.getElementsByTagName("time")[0].attributes["file"].value,
-            size=elem.getElementsByTagName("size")[0].attributes["package"].value,
+            time=get_tag(elem, "time").attributes["file"].value,
+            size=get_tag(elem, "size").attributes["package"].value,
         )

     @property
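Two things happen in the hunks above: defusedxml ships no type information, so its import is silenced with a type: ignore comment, and the repeated untyped getElementsByTagName(...)[0] lookups are funnelled through small typed helpers (get_tag, get_text_tag) so that the ignore and the None handling live in one place. Roughly the same idea in a self-contained sketch that sticks to the standard library's minidom:

from xml.dom.minidom import Element, parseString


def get_tag(elem: Element, name: str) -> Element:
    # One typed access point instead of repeating the raw lookup everywhere.
    return elem.getElementsByTagName(name)[0]


def get_text_tag(elem: Element, name: str) -> str:
    child = get_tag(elem, name).firstChild
    assert child  # the real code marks this with a TODO for a proper error
    return str(child.toxml())


doc = parseString("<package><name>repo-autoindex</name></package>")
print(get_text_tag(doc.documentElement, "name"))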
@@ -45,7 +57,7 @@ class Package:

 def pulldom_elements(
     xml_str: str, path_matcher, attr_matcher=lambda _: True
-) -> Generator[Element]:
+) -> Generator[Element, None, None]:
     stream = pulldom.parseString(xml_str)
     current_path = []
     for event, node in stream:
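The Generator[Element] to Generator[Element, None, None] fix above reflects that Generator takes three type parameters (yield, send and return types); without the defaults added only in very recent Python/typing versions, mypy rejects the one-argument form. For a plain generator that neither receives sent values nor returns anything, the last two parameters are None:

from collections.abc import Generator


def countdown(n: int) -> Generator[int, None, None]:
    # yields int, accepts no sent values, returns nothing
    while n > 0:
        yield n
        n -= 1


print(list(countdown(3)))  # [3, 2, 1]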
@@ -91,8 +103,11 @@ class YumRepo(Repo):
             )
         )
         if len(revision_nodes) == 1:
+            timestamp_node = revision_nodes[0].firstChild
+            # TODO: raise proper error
+            assert timestamp_node
             time = datetime.datetime.utcfromtimestamp(
-                float(revision_nodes[0].firstChild.toxml())
+                float(timestamp_node.toxml())
             ).isoformat()

             out.append(
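The new timestamp_node variable and assert above are a narrowing idiom: firstChild is typed as possibly None, so calling toxml() on it directly is a mypy error, while asserting that the node exists narrows the type for the lines that follow (the TODO acknowledges a real exception would be nicer than assert). The pattern in miniature:

from typing import Optional


def first_line(text: Optional[str]) -> str:
    # Without the assert, "text.splitlines()" is an error: text might be None.
    assert text is not None
    return text.splitlines()[0]


print(first_line("1652135687\nsecond line"))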
@@ -100,7 +115,7 @@ class YumRepo(Repo):
                     href="repodata/repomd.xml",
                     text="repomd.xml",
                     time=time,
-                    size=size,
+                    size=str(size),
                 )
             )

@@ -111,14 +126,14 @@ class YumRepo(Repo):
                 attr_matcher=lambda attrs: attrs.get("type"),
             )
         )
-        data_nodes.sort(key=lambda node: node.attributes["type"].value)
+        data_nodes.sort(key=lambda node: str(node.attributes["type"].value))

         for node in data_nodes:
-            href = node.getElementsByTagName("location")[0].attributes["href"].value
+            href = get_tag(node, "location").attributes["href"].value
             basename = os.path.basename(href)
-            timestamp = node.getElementsByTagName("timestamp")[0].firstChild.toxml()
+            timestamp = get_text_tag(node, "timestamp")
             time = datetime.datetime.utcfromtimestamp(float(timestamp)).isoformat()
-            size = int(node.getElementsByTagName("size")[0].firstChild.toxml())
+            size = int(get_text_tag(node, "size"))

             out.append(
                 IndexEntry(
@@ -144,12 +159,15 @@ class YumRepo(Repo):
         assert len(primary_nodes) == 1
         primary_node = primary_nodes[0]

-        location = primary_node.getElementsByTagName("location")[0]
+        location = get_tag(primary_node, "location")
         href = location.attributes["href"].value

         primary_url = "/".join([self.base_url, href])
         primary_xml = await self.fetcher(primary_url)

+        # TODO: raise proper error if missing
+        assert primary_xml
+
         return sorted(
             [p.index_entry for p in self.__packages_from_primary(primary_xml)],
             key=lambda e: e.text,
@@ -175,7 +193,7 @@ class YumRepo(Repo):
         self,
         entries: Iterable[IndexEntry],
         index_href_suffix: str,
-    ) -> Generator[GeneratedIndex, None]:
+    ) -> Generator[GeneratedIndex, None, None]:
         ctx = TemplateContext()
         nodes = [treeify(entries, index_href_suffix=index_href_suffix)]
         while nodes:
@@ -197,7 +215,7 @@ class YumRepo(Repo):

         if repomd_xml is None:
             # not yum repo
-            return
+            return None

         # it is a yum repo
         return cls(url, repomd_xml, fetcher)
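The bare return becoming return None above is behavior-neutral; it simply spells out the None branch of what is presumably an Optional[...]-returning probe, mirroring the value-returning branch next to it, which some strictness settings and reviewers prefer. A compact sketch of that overall shape, with illustrative names rather than the real signature:

from typing import Optional


class ProbedRepo:
    def __init__(self, url: str, repomd_xml: str) -> None:
        self.url = url
        self.repomd_xml = repomd_xml

    @classmethod
    def probe(cls, url: str, repomd_xml: Optional[str]) -> Optional["ProbedRepo"]:
        if repomd_xml is None:
            # not a yum repo
            return None
        # it is a yum repo
        return cls(url, repomd_xml)


print(ProbedRepo.probe("https://example.com/repo", None))
print(ProbedRepo.probe("https://example.com/repo", "<repomd/>"))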
tox.ini (2 lines changed)
@@ -1,6 +1,6 @@
 [tox]
 isolated_build = True
-envlist = py39,docs,precommit,mypy
+envlist = py39,precommit,mypy
 envdir = {toxworkdir}/poetry

 [testenv]