
Rewrite SEO crawler in-process with requests + BeautifulSoup

96f2e4e1 by Isaac Bythewood · 25 days ago

Drop Scrapy for a smaller crawler that runs in the scheduler thread
instead of a subprocess. Adds broken-link, robots/sitemap, accessibility,
content, performance, and mixed-content checks with severity levels, and
fixes the scheduler JOIN_TIMEOUT mismatch that was abandoning slow crawls.
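
The new entry point is a plain function call instead of a scrapy subprocess; a minimal sketch of how a caller would use it (the target URL here is illustrative, not from this repo):

    from crawler.runner import run_seo_spider

    # Crawls up to PAGE_CAP pages in the current thread, runs every check,
    # writes debug output, and returns a flat list of insight dicts.
    insights = run_seo_spider("https://example.com")
    for i in insights:
        print(i["severity"], i["type"], i["url"], "-", i["issue"])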
added crawler/checks.py
@@ -0,0 +1,760 @@"""SEO / accessibility / performance / content / security checks.Every check takes a single `ctx` dict and returns a list of insight dicts.Insight shape: {url, issue, item, type, severity}"""from urllib.parse import urlparsefrom .fetcher import same_siteTYPE_SEO = "seo"TYPE_LINKS = "links"TYPE_ACCESSIBILITY = "accessibility"TYPE_CONTENT = "content"TYPE_PERFORMANCE = "performance"TYPE_SECURITY = "security"SEVERITY_ERROR = "error"SEVERITY_WARNING = "warning"SEVERITY_INFO = "info"REDIRECT_CODES = {301, 302, 303, 307, 308}def _insight(url, issue, type_, severity, item=""):    return {        "url": url,        "issue": issue,        "item": item,        "type": type_,        "severity": severity,    }def _normalize(s):    return " ".join(s.lower().split()) if s else ""def _group_by(pages, field):    seen = {}    for p in pages:        val = p.get(field, "")        if val:            seen.setdefault(_normalize(val), []).append(p)    return seen# ---------- core metadata ----------def check_title_missing(ctx):    return [        _insight(p["url"], "Page has no title", TYPE_SEO, SEVERITY_ERROR)        for p in ctx["html_pages"]        if not p.get("title")    ]def check_title_length(ctx):    out = []    for p in ctx["html_pages"]:        t = p.get("title", "")        if t and not (30 <= len(t) <= 60):            out.append(                _insight(                    p["url"],                    f"Title length is {len(t)} chars (recommended 30-60)",                    TYPE_SEO,                    SEVERITY_WARNING,                    item=t,                )            )    return outdef check_duplicate_titles(ctx):    out = []    for _, group in _group_by(ctx["html_pages"], "title").items():        if len(group) > 1:            for p in group:                out.append(                    _insight(                        p["url"],                        "Duplicate title",                        TYPE_SEO,                        SEVERITY_WARNING,                        item=p["title"],                    )                )    return outdef check_description_missing(ctx):    return [        _insight(p["url"], "Page has no meta description", TYPE_SEO, SEVERITY_ERROR)        for p in ctx["html_pages"]        if not p.get("description")    ]def check_description_length(ctx):    out = []    for p in ctx["html_pages"]:        d = p.get("description", "")        if d and not (70 <= len(d) <= 160):            out.append(                _insight(                    p["url"],                    f"Description length is {len(d)} chars (recommended 70-160)",                    TYPE_SEO,                    SEVERITY_WARNING,                    item=d,                )            )    return outdef check_duplicate_descriptions(ctx):    out = []    for _, group in _group_by(ctx["html_pages"], "description").items():        if len(group) > 1:            for p in group:                out.append(                    _insight(                        p["url"],                        "Duplicate meta description",                        TYPE_SEO,                        SEVERITY_WARNING,                        item=p["description"],                    )                )    return outdef check_h1_missing(ctx):    return [        _insight(p["url"], "Page has no h1", TYPE_SEO, SEVERITY_ERROR)        for p in ctx["html_pages"]        if not p.get("headings", {}).get("h1")    ]def check_h1_multiple(ctx):    out = []    for p in ctx["html_pages"]:        h1s = p.get("headings", {}).get("h1", [])        if 
len(h1s) > 1:            out.append(                _insight(                    p["url"],                    f"Page has {len(h1s)} h1 tags (expected 1)",                    TYPE_SEO,                    SEVERITY_WARNING,                    item=" | ".join(h1s[:3]),                )            )    return outdef check_h1_length(ctx):    out = []    for p in ctx["html_pages"]:        h1s = p.get("headings", {}).get("h1", [])        if h1s and not (20 <= len(h1s[0]) <= 70):            out.append(                _insight(                    p["url"],                    f"H1 length is {len(h1s[0])} chars (recommended 20-70)",                    TYPE_SEO,                    SEVERITY_WARNING,                    item=h1s[0],                )            )    return outdef check_duplicate_h1s(ctx):    out = []    buckets = {}    for p in ctx["html_pages"]:        h1s = p.get("headings", {}).get("h1", [])        if h1s:            buckets.setdefault(_normalize(h1s[0]), []).append((p["url"], h1s[0]))    for _, group in buckets.items():        if len(group) > 1:            for url, item in group:                out.append(_insight(url, "Duplicate h1", TYPE_SEO, SEVERITY_WARNING, item=item))    return outdef check_heading_hierarchy(ctx):    out = []    for p in ctx["html_pages"]:        h = p.get("headings", {})        levels = [lvl for lvl in range(1, 7) if h.get(f"h{lvl}")]        for i in range(1, len(levels)):            if levels[i] - levels[i - 1] > 1:                out.append(                    _insight(                        p["url"],                        f"Heading hierarchy skips from h{levels[i - 1]} to h{levels[i]}",                        TYPE_SEO,                        SEVERITY_INFO,                    )                )                break    return outdef check_canonical_missing(ctx):    return [        _insight(p["url"], "Page has no canonical URL", TYPE_SEO, SEVERITY_WARNING)        for p in ctx["html_pages"]        if not p.get("canonical")    ]def check_canonical_offdomain(ctx):    out = []    host = ctx["host"]    for p in ctx["html_pages"]:        c = p.get("canonical", "")        if c and not same_site(c, host):            out.append(                _insight(                    p["url"],                    "Canonical URL points off-domain",                    TYPE_SEO,                    SEVERITY_WARNING,                    item=c,                )            )    return outdef check_canonical_broken(ctx):    out = []    status_map = ctx["status_map"]    for p in ctx["html_pages"]:        c = p.get("canonical", "")        if c and c in status_map and status_map[c] != 200:            out.append(                _insight(                    p["url"],                    f"Canonical URL returns {status_map[c]}",                    TYPE_SEO,                    SEVERITY_ERROR,                    item=c,                )            )    return outdef check_robots_meta_noindex(ctx):    out = []    for p in ctx["html_pages"]:        rm = (p.get("robots_meta") or "").lower()        if "noindex" in rm:            out.append(                _insight(                    p["url"],                    "Page has noindex in meta robots tag",                    TYPE_SEO,                    SEVERITY_WARNING,                    item=p.get("robots_meta", ""),                )            )    return outdef check_lang_missing(ctx):    return [        _insight(p["url"], "HTML lang attribute missing", TYPE_SEO, SEVERITY_WARNING)        for p in ctx["html_pages"]        if not p.get("lang")    ]def 
check_viewport_missing(ctx):    return [        _insight(p["url"], "Viewport meta tag missing (mobile)", TYPE_SEO, SEVERITY_WARNING)        for p in ctx["html_pages"]        if not p.get("viewport")    ]def check_og_incomplete(ctx):    out = []    for p in ctx["html_pages"]:        og = p.get("og") or {}        missing = [k for k in ("title", "description", "image", "url") if not og.get(k)]        if missing:            out.append(                _insight(                    p["url"],                    f"Open Graph tags missing: {', '.join('og:' + m for m in missing)}",                    TYPE_SEO,                    SEVERITY_INFO,                )            )    return outdef check_twitter_card(ctx):    return [        _insight(p["url"], "Twitter card meta tag missing", TYPE_SEO, SEVERITY_INFO)        for p in ctx["html_pages"]        if not (p.get("twitter") or {}).get("card")    ]def check_favicon(ctx):    return [        _insight(p["url"], "Favicon link missing", TYPE_SEO, SEVERITY_INFO)        for p in ctx["html_pages"]        if not p.get("favicon")    ]def check_json_ld_parse_error(ctx):    out = []    for p in ctx["html_pages"]:        for item in p.get("json_ld", []):            if item is None:                out.append(                    _insight(                        p["url"],                        "JSON-LD structured data failed to parse",                        TYPE_SEO,                        SEVERITY_WARNING,                    )                )                break    return out# ---------- links ----------def check_broken_internal_links(ctx):    out = []    reported = set()    status_map = ctx["status_map"]    host = ctx["host"]    for p in ctx["html_pages"]:        for link in p.get("links", []):            lu = link["url"]            if not same_site(lu, host):                continue            status = status_map.get(lu)            if status is None:                continue            if status != 200 and status not in REDIRECT_CODES:                key = (p["url"], lu)                if key in reported:                    continue                reported.add(key)                label = f"status {status}" if status else "unreachable"                out.append(                    _insight(                        p["url"],                        f"Broken internal link ({label})",                        TYPE_LINKS,                        SEVERITY_ERROR,                        item=lu,                    )                )    return outdef check_broken_external_links(ctx):    out = []    reported = set()    host = ctx["host"]    ext = ctx["external_link_status"]    for p in ctx["html_pages"]:        for link in p.get("links", []):            lu = link["url"]            if same_site(lu, host):                continue            if lu not in ext:                continue            status = ext[lu]            if status == 0 or status >= 400:                key = (p["url"], lu)                if key in reported:                    continue                reported.add(key)                label = f"status {status}" if status else "unreachable"                out.append(                    _insight(                        p["url"],                        f"Broken external link ({label})",                        TYPE_LINKS,                        SEVERITY_WARNING,                        item=lu,                    )                )    return outdef check_redirect_chains(ctx):    out = []    for p in ctx["pages"]:        chain = p.get("redirect_chain") or []        if 
len(chain) > 2:  # initial + final is fine; more means multiple hops            hops = len(chain) - 1            out.append(                _insight(                    p["url"],                    f"Redirect chain has {hops} hops",                    TYPE_LINKS,                    SEVERITY_INFO,                    item=" -> ".join(str(code) for code, _ in chain),                )            )    return outdef check_nofollow_internal_links(ctx):    out = []    reported = set()    host = ctx["host"]    for p in ctx["html_pages"]:        for link in p.get("links", []):            lu = link["url"]            if not same_site(lu, host):                continue            if "nofollow" in (link.get("rel") or []):                key = (p["url"], lu)                if key in reported:                    continue                reported.add(key)                out.append(                    _insight(                        p["url"],                        "Internal link has rel=nofollow",                        TYPE_LINKS,                        SEVERITY_INFO,                        item=lu,                    )                )    return out# ---------- robots / sitemap ----------def check_robots_missing(ctx):    if not ctx["robots"]["exists"]:        return [            _insight(                ctx["start_url"],                "robots.txt missing",                TYPE_SEO,                SEVERITY_WARNING,                item=ctx["robots"]["url"],            )        ]    return []def check_sitemap_missing(ctx):    if not ctx["sitemap_urls"]:        return [            _insight(                ctx["start_url"],                "sitemap.xml missing or empty",                TYPE_SEO,                SEVERITY_WARNING,            )        ]    return []def check_sitemap_not_in_robots(ctx):    if (        ctx["robots"]["exists"]        and ctx["sitemap_urls"]        and not ctx["robots"].get("references_sitemap")    ):        return [            _insight(                ctx["start_url"],                "robots.txt does not reference a sitemap",                TYPE_SEO,                SEVERITY_INFO,            )        ]    return []def check_sitemap_broken_urls(ctx):    out = []    status_map = ctx["status_map"]    for url in ctx["sitemap_urls"]:        s = status_map.get(url)        if s is not None and s != 200 and s not in REDIRECT_CODES:            out.append(                _insight(                    url,                    f"URL listed in sitemap returns {s}",                    TYPE_SEO,                    SEVERITY_ERROR,                )            )    return outdef check_pages_missing_from_sitemap(ctx):    if not ctx["sitemap_urls"]:        return []    sitemap_set = set(ctx["sitemap_urls"])    out = []    for p in ctx["html_pages"]:        if p["url"] in sitemap_set:            continue        # Ignore pages excluded by meta robots        if "noindex" in (p.get("robots_meta") or "").lower():            continue        out.append(            _insight(                p["url"],                "Page not listed in sitemap",                TYPE_SEO,                SEVERITY_INFO,            )        )    return out# ---------- accessibility ----------def check_images_missing_alt(ctx):    out = []    for p in ctx["html_pages"]:        missing = [img for img in p.get("images", []) if img.get("alt") is None]        if missing:            out.append(                _insight(                    p["url"],                    f"{len(missing)} image(s) missing alt attribute",                    
TYPE_ACCESSIBILITY,                    SEVERITY_WARNING,                    item=missing[0].get("src", "")[:160],                )            )    return outdef check_empty_anchor_text(ctx):    out = []    for p in ctx["html_pages"]:        empty = [link for link in p.get("links", []) if not link.get("text")]        if empty:            out.append(                _insight(                    p["url"],                    f"{len(empty)} link(s) have no visible text",                    TYPE_ACCESSIBILITY,                    SEVERITY_INFO,                    item=empty[0].get("url", "")[:160],                )            )    return outdef check_form_inputs_unlabeled(ctx):    out = []    ignore_types = {"hidden", "submit", "button", "reset", "image"}    for p in ctx["html_pages"]:        for form in p.get("forms", []):            label_fors = set(form.get("label_fors", []))            unlabeled = 0            for i in form.get("inputs", []):                if (i.get("type") or "text").lower() in ignore_types:                    continue                if i.get("aria_label"):                    continue                if i.get("id") and i.get("id") in label_fors:                    continue                unlabeled += 1            if unlabeled:                out.append(                    _insight(                        p["url"],                        f"{unlabeled} form input(s) without associated label",                        TYPE_ACCESSIBILITY,                        SEVERITY_WARNING,                        item=form.get("action", ""),                    )                )                break  # one insight per page    return out# ---------- content ----------def check_thin_content(ctx):    out = []    for p in ctx["html_pages"]:        wc = p.get("word_count", 0)        if wc < 300:            out.append(                _insight(                    p["url"],                    f"Thin content ({wc} words)",                    TYPE_CONTENT,                    SEVERITY_WARNING,                )            )    return outdef check_duplicate_content(ctx):    out = []    buckets = {}    for p in ctx["html_pages"]:        th = p.get("text_hash")        if th:            buckets.setdefault(th, []).append(p["url"])    for urls in buckets.values():        if len(urls) > 1:            for u in urls:                other = next((x for x in urls if x != u), urls[0])                out.append(                    _insight(                        u,                        "Page has duplicate visible content with another page",                        TYPE_CONTENT,                        SEVERITY_WARNING,                        item=other,                    )                )    return out# ---------- performance ----------def check_slow_pages(ctx):    out = []    for p in ctx["pages"]:        if not p.get("is_html"):            continue        ms = p.get("elapsed_ms", 0)        if ms > 1000:            out.append(                _insight(                    p["url"],                    f"Slow response ({ms} ms)",                    TYPE_PERFORMANCE,                    SEVERITY_WARNING,                )            )    return outdef check_missing_compression(ctx):    out = []    for p in ctx["html_pages"]:        headers = p.get("headers") or {}        enc = ""        for k, v in headers.items():            if k.lower() == "content-encoding":                enc = (v or "").lower()                break        if not enc:            out.append(                _insight(                    p["url"],        
            "Response not compressed (no Content-Encoding header)",                    TYPE_PERFORMANCE,                    SEVERITY_INFO,                )            )    return outdef check_oversized_pages(ctx):    out = []    for p in ctx["pages"]:        size = p.get("bytes", 0)        if size > 500_000:            out.append(                _insight(                    p["url"],                    f"Oversized page ({size // 1024} KB)",                    TYPE_PERFORMANCE,                    SEVERITY_WARNING,                )            )    return out# ---------- security (per-page; SecurityMixin covers site-level) ----------def check_mixed_content(ctx):    out = []    for p in ctx["html_pages"]:        if not p["url"].startswith("https://"):            continue        http_resources = [r for r in p.get("resources", []) if r.startswith("http://")]        if http_resources:            out.append(                _insight(                    p["url"],                    f"Mixed content: {len(http_resources)} http:// resource(s) on https:// page",                    TYPE_SECURITY,                    SEVERITY_WARNING,                    item=http_resources[0],                )            )    return outALL_CHECKS = [    # Core metadata    check_title_missing,    check_title_length,    check_duplicate_titles,    check_description_missing,    check_description_length,    check_duplicate_descriptions,    check_h1_missing,    check_h1_multiple,    check_h1_length,    check_duplicate_h1s,    check_heading_hierarchy,    check_canonical_missing,    check_canonical_offdomain,    check_canonical_broken,    check_robots_meta_noindex,    check_lang_missing,    check_viewport_missing,    check_og_incomplete,    check_twitter_card,    check_favicon,    check_json_ld_parse_error,    # Links    check_broken_internal_links,    check_broken_external_links,    check_redirect_chains,    check_nofollow_internal_links,    # Robots / sitemap    check_robots_missing,    check_sitemap_missing,    check_sitemap_not_in_robots,    check_sitemap_broken_urls,    check_pages_missing_from_sitemap,    # Accessibility    check_images_missing_alt,    check_empty_anchor_text,    check_form_inputs_unlabeled,    # Content    check_thin_content,    check_duplicate_content,    # Performance    check_slow_pages,    check_missing_compression,    check_oversized_pages,    # Security    check_mixed_content,]
added crawler/fetcher.py
@@ -0,0 +1,154 @@"""HTTP fetching, robots.txt, and sitemap loading."""import loggingimport urllib.robotparserfrom dataclasses import dataclass, fieldfrom urllib.parse import urlparseimport requestsfrom bs4 import BeautifulSouplogger = logging.getLogger(__name__)USER_AGENT = "status (+https://status.bythewood.me)"PAGE_CAP = 500CONCURRENCY = 4REQUEST_TIMEOUT = (5, 15)EXTERNAL_LINK_TIMEOUT = (3, 8)# Hard deadline for a single site crawl. Scheduler JOIN_TIMEOUT must exceed this.CRAWL_DEADLINE_SECONDS = 540@dataclassclass FetchResult:    url: str    requested_url: str    status: int    headers: dict    body: bytes    content_type: str    elapsed_ms: int    redirect_chain: list = field(default_factory=list)    error: str = ""def make_session():    s = requests.Session()    s.headers.update({"User-Agent": USER_AGENT})    return sdef fetch(session, url):    """GET a URL and return a FetchResult. Body is only captured for HTML."""    try:        r = session.get(url, timeout=REQUEST_TIMEOUT, allow_redirects=True)        chain = [(h.status_code, h.url) for h in r.history]        chain.append((r.status_code, r.url))        content_type = r.headers.get("content-type", "").lower()        body = r.content if "text/html" in content_type else b""        return FetchResult(            url=r.url,            requested_url=url,            status=r.status_code,            headers=dict(r.headers),            body=body,            content_type=content_type,            elapsed_ms=int(r.elapsed.total_seconds() * 1000),            redirect_chain=chain,        )    except requests.RequestException as e:        return FetchResult(            url=url,            requested_url=url,            status=0,            headers={},            body=b"",            content_type="",            elapsed_ms=0,            error=str(e),        )def head_status(session, url):    """Cheap check for external links. Returns the status code (0 on error)."""    try:        r = session.head(url, timeout=EXTERNAL_LINK_TIMEOUT, allow_redirects=True)        # Some servers reject HEAD with 405/403 but accept GET.        if r.status_code in (403, 405, 501):            r = session.get(                url, timeout=EXTERNAL_LINK_TIMEOUT, allow_redirects=True, stream=True            )            r.close()        return r.status_code    except requests.RequestException:        return 0def load_robots(session, base_origin):    """Fetch robots.txt. Returns (RobotFileParser, robots_url, raw_text_or_None)."""    robots_url = f"{base_origin}/robots.txt"    rp = urllib.robotparser.RobotFileParser()    text = None    try:        r = session.get(robots_url, timeout=REQUEST_TIMEOUT)        if r.status_code == 200:            text = r.text            rp.parse(text.splitlines())    except requests.RequestException:        pass    return rp, robots_url, textdef _parse_sitemap_xml(body):    """Return all <loc> values. Distinguishing index vs urlset is handled upstream."""    soup = BeautifulSoup(body, "xml")    return [loc.get_text(strip=True) for loc in soup.find_all("loc")]def load_sitemap(session, base_origin, robots_text):    """Return list of URLs from sitemap(s). Follows sitemap indexes one level.    Checks Sitemap: entries in robots.txt first, falls back to /sitemap.xml.    
"""    candidates = []    if robots_text:        for line in robots_text.splitlines():            if line.lower().startswith("sitemap:"):                candidates.append(line.split(":", 1)[1].strip())    if not candidates:        candidates.append(f"{base_origin}/sitemap.xml")    seen = set()    urls = []    to_fetch = list(candidates)    while to_fetch and len(seen) < 20:        smurl = to_fetch.pop()        if smurl in seen:            continue        seen.add(smurl)        try:            r = session.get(smurl, timeout=REQUEST_TIMEOUT)            if r.status_code != 200:                continue            for loc in _parse_sitemap_xml(r.content):                # Sub-sitemaps end in .xml; everything else is a page URL.                if loc.lower().endswith(".xml") or "sitemap" in loc.lower():                    to_fetch.append(loc)                else:                    urls.append(loc)        except requests.RequestException:            continue    return urlsdef same_site(url, host):    """True if `url` is on `host` or its www./apex counterpart."""    u = urlparse(url).netloc.lower()    h = host.lower()    if not u:        return False    if u == h:        return True    if u == "www." + h or h == "www." + u:        return True    return False
deleted crawler/items.py
@@ -1,5 +0,0 @@
import scrapy


class CrawlerItem(scrapy.Item):
    pass
deleted crawler/middlewares.py
@@ -1,97 +0,0 @@
from scrapy import signals

from itemadapter import is_item, ItemAdapter


class CrawlerSpiderMiddleware:
    # Not all methods need to be defined. If a method is not defined,
    # scrapy acts as if the spider middleware does not modify the
    # passed objects.

    @classmethod
    def from_crawler(cls, crawler):
        # This method is used by Scrapy to create your spiders.
        s = cls()
        crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
        return s

    def process_spider_input(self, response, spider):
        # Called for each response that goes through the spider
        # middleware and into the spider.
        # Should return None or raise an exception.
        return None

    def process_spider_output(self, response, result, spider):
        # Called with the results returned from the Spider, after
        # it has processed the response.
        # Must return an iterable of Request, or item objects.
        for i in result:
            yield i

    def process_spider_exception(self, response, exception, spider):
        # Called when a spider or process_spider_input() method
        # (from other spider middleware) raises an exception.
        # Should return either None or an iterable of Request or item objects.
        pass

    def process_start_requests(self, start_requests, spider):
        # Called with the start requests of the spider, and works
        # similarly to the process_spider_output() method, except
        # that it doesn’t have a response associated.
        # Must return only requests (not items).
        for r in start_requests:
            yield r

    def spider_opened(self, spider):
        spider.logger.info('Spider opened: %s' % spider.name)


class CrawlerDownloaderMiddleware:
    # Not all methods need to be defined. If a method is not defined,
    # scrapy acts as if the downloader middleware does not modify the
    # passed objects.

    @classmethod
    def from_crawler(cls, crawler):
        # This method is used by Scrapy to create your spiders.
        s = cls()
        crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
        return s

    def process_request(self, request, spider):
        # Called for each request that goes through the downloader
        # middleware.
        # Must either:
        # - return None: continue processing this request
        # - or return a Response object
        # - or return a Request object
        # - or raise IgnoreRequest: process_exception() methods of
        #   installed downloader middleware will be called
        return None

    def process_response(self, request, response, spider):
        # Called with the response returned from the downloader.
        # Must either;
        # - return a Response object
        # - return a Request object
        # - or raise IgnoreRequest
        return response

    def process_exception(self, request, exception, spider):
        # Called when a download handler or a process_request()
        # (from other downloader middleware) raises an exception.
        # Must either:
        # - return None: continue processing this exception
        # - return a Response object: stops process_exception() chain
        # - return a Request object: stops process_exception() chain
        pass

    def spider_opened(self, spider):
        spider.logger.info('Spider opened: %s' % spider.name)
added crawler/parser.py
@@ -0,0 +1,167 @@"""HTML parsing: raw body -> structured page dict."""import hashlibimport jsonimport loggingfrom urllib.parse import urljoinfrom bs4 import BeautifulSouplogger = logging.getLogger(__name__)def parse_html(body, url):    """Extract everything the checks need from one HTML page.    Returns a dict with title, meta, headings, links, images, resources,    forms, json_ld, word count, text hash, favicon, lang, viewport, robots.    """    soup = BeautifulSoup(body, "lxml")    title = ""    if soup.title and soup.title.string:        title = soup.title.string.strip()    def meta_name(name):        tag = soup.find("meta", attrs={"name": name})        if tag and tag.get("content"):            return tag["content"].strip()        return ""    def meta_property(prop):        tag = soup.find("meta", attrs={"property": prop})        if tag and tag.get("content"):            return tag["content"].strip()        return ""    description = meta_name("description")    robots_meta = meta_name("robots")    viewport = meta_name("viewport")    canonical_tag = soup.find("link", rel="canonical")    canonical = ""    if canonical_tag and canonical_tag.get("href"):        canonical = urljoin(url, canonical_tag["href"].strip())    og = {        "title": meta_property("og:title"),        "description": meta_property("og:description"),        "image": meta_property("og:image"),        "url": meta_property("og:url"),    }    twitter = {        "card": meta_name("twitter:card"),        "title": meta_name("twitter:title"),        "description": meta_name("twitter:description"),    }    html_tag = soup.find("html")    lang = html_tag.get("lang", "").strip() if html_tag else ""    headings = {f"h{i}": [] for i in range(1, 7)}    for level in range(1, 7):        for h in soup.find_all(f"h{level}"):            headings[f"h{level}"].append(h.get_text(" ", strip=True))    links = []    for a in soup.find_all("a", href=True):        href = a["href"].strip()        if not href or href.startswith(("javascript:", "mailto:", "tel:", "#")):            continue        rel = a.get("rel") or []        if isinstance(rel, str):            rel = rel.split()        links.append(            {                "url": urljoin(url, href),                "text": a.get_text(" ", strip=True),                "rel": list(rel),            }        )    images = []    for img in soup.find_all("img"):        src = img.get("src", "").strip()        alt = img.get("alt")  # None = missing attribute, "" = explicitly empty        images.append(            {                "src": urljoin(url, src) if src else "",                "alt": alt,            }        )    resources = []    for tag in soup.find_all(["script", "link", "img", "iframe", "source"]):        src = tag.get("src") or tag.get("href")        if src and src.strip():            resources.append(urljoin(url, src.strip()))    json_ld = []    for s in soup.find_all("script", type="application/ld+json"):        raw = s.string or s.get_text() or ""        if not raw.strip():            continue        try:            json_ld.append(json.loads(raw))        except (ValueError, TypeError):            json_ld.append(None)  # parse error    favicon = ""    for link in soup.find_all("link", rel=True):        rels = link.get("rel", [])        if isinstance(rels, str):            rels = rels.split()        if any("icon" in r.lower() for r in rels):            href = link.get("href", "").strip()            if href:                favicon = urljoin(url, href)                break    forms = []    
for form in soup.find_all("form"):        inputs = []        for i in form.find_all(["input", "textarea", "select"]):            inputs.append(                {                    "type": i.get("type", "text"),                    "name": i.get("name"),                    "id": i.get("id"),                    "aria_label": i.get("aria-label"),                }            )        label_fors = {lb.get("for") for lb in form.find_all("label") if lb.get("for")}        forms.append(            {                "action": urljoin(url, form.get("action", "")) if form.get("action") else url,                "inputs": inputs,                "label_fors": list(label_fors),            }        )    # Visible text for word count + duplicate detection.    for tag in soup(["script", "style", "noscript"]):        tag.decompose()    text = soup.get_text(" ", strip=True)    word_count = len(text.split())    text_hash = hashlib.sha256(text.encode("utf-8", errors="replace")).hexdigest()    return {        "title": title,        "description": description,        "canonical": canonical,        "robots_meta": robots_meta,        "viewport": viewport,        "lang": lang,        "og": og,        "twitter": twitter,        "headings": headings,        "links": links,        "images": images,        "resources": resources,        "json_ld": json_ld,        "favicon": favicon,        "forms": forms,        "word_count": word_count,        "text_hash": text_hash,    }
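
For a sense of the parse_html output shape, a minimal sketch (the HTML snippet and URL are made up; lxml is required, which the new dependency set includes):

    from crawler.parser import parse_html

    html = (b"<html lang='en'><head><title>Hello</title></head>"
            b"<body><h1>Hi there</h1><a href='/about'>About</a></body></html>")
    page = parse_html(html, "https://example.com/")
    print(page["title"])           # Hello
    print(page["headings"]["h1"])  # ['Hi there']
    print(page["links"][0]["url"]) # https://example.com/about
    print(page["word_count"])      # visible words, title included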
deleted crawler/pipelines.py
@@ -1,6 +0,0 @@
from itemadapter import ItemAdapter


class CrawlerPipeline:
    def process_item(self, item, spider):
        return item
modified crawler/runner.py
@@ -1,52 +1,224 @@"""Entry point for the SEO crawler.Crawls a site, runs checks, writes per-page debug output, and returnsa list of insights. Designed to run in-process; no subprocess required."""import jsonimport loggingimport subprocessimport osimport timefrom collections import dequefrom concurrent.futures import ThreadPoolExecutor, as_completedfrom urllib.parse import urlparsefrom django.conf import settingsfrom . import checksfrom .fetcher import (    CONCURRENCY,    CRAWL_DEADLINE_SECONDS,    PAGE_CAP,    fetch,    head_status,    load_robots,    load_sitemap,    make_session,    same_site,)from .parser import parse_htmllogger = logging.getLogger(__name__)# Cap a single SEO crawl. Spiders that wedge on a slow site otherwise hold a# scheduler thread forever.SUBPROCESS_TIMEOUT_SECONDS = 600def _output_path(host):    base = "crawler_output" if settings.DEBUG else "/data/crawler_output"    return os.path.join(base, f"{host}.json")def run_seo_spider(url):    """    Run the SEO spider on the given URL.    I'm using the command line runner instead of the python runner because the    python runner doesn't play well with threads and I can run multiple spiders    with the command line runner.    """    filename = url.split('/')[2] + '.json'    if settings.DEBUG:        filename = 'crawler_output/' + filename    else:        filename = '/data/crawler_output/' + filename    # remove the file if it exists before running the spider    if os.path.exists(filename):        os.remove(filename)    # use the jsonlines format to store the results    try:        subprocess.run(            [                'uv',                'run',                'scrapy',                'crawl',                'seo_spider',                '-a',                'url=' + url,                '-t',                'jsonlines',                '-o',                filename,            ],            timeout=SUBPROCESS_TIMEOUT_SECONDS,def _normalize_url(url):    # Drop fragments; keep query strings since they often distinguish pages.    p = urlparse(url)    cleaned = p._replace(fragment="").geturl()    return cleaned.rstrip("/") or cleaneddef crawl(start_url):    """Fetch up to PAGE_CAP pages from the same host and collect metadata."""    session = make_session()    parsed = urlparse(start_url)    host = parsed.netloc    base_origin = f"{parsed.scheme}://{parsed.netloc}"    rp, robots_url, robots_text = load_robots(session, base_origin)    sitemap_urls = load_sitemap(session, base_origin, robots_text)    seen = set()    queue = deque()    pages = []    fetched = set()    deadline = time.time() + CRAWL_DEADLINE_SECONDS    def enqueue(url):        n = _normalize_url(url)        if n in seen:            return        seen.add(n)        queue.append(url)    enqueue(start_url)    # Seed with sitemap URLs so sitemap-only pages get crawled too.    
for url in sitemap_urls[:PAGE_CAP]:        if same_site(url, host):            enqueue(url)    with ThreadPoolExecutor(max_workers=CONCURRENCY) as ex:        while queue and len(pages) < PAGE_CAP and time.time() < deadline:            batch = []            while queue and len(batch) < CONCURRENCY and len(pages) + len(batch) < PAGE_CAP:                url = queue.popleft()                if not rp.can_fetch("*", url):                    continue                batch.append(url)            if not batch:                break            futures = [ex.submit(fetch, session, u) for u in batch]            for f in as_completed(futures):                r = f.result()                # Different requested URLs can collapse to the same final URL                # after redirects. Drop duplicates so checks don't double-flag.                final_key = _normalize_url(r.url)                if final_key in fetched:                    seen.add(final_key)                    continue                fetched.add(final_key)                is_html = r.status == 200 and "text/html" in r.content_type                page = {                    "url": r.url,                    "requested_url": r.requested_url,                    "status": r.status,                    "content_type": r.content_type,                    "elapsed_ms": r.elapsed_ms,                    "bytes": len(r.body),                    "headers": r.headers,                    "redirect_chain": r.redirect_chain,                    "error": r.error,                    "is_html": is_html,                }                if is_html:                    try:                        page.update(parse_html(r.body, r.url))                    except Exception:                        logger.exception("[crawler] parse failed for %s", r.url)                        page["is_html"] = False                    else:                        for link in page.get("links", []):                            lu = link["url"]                            if same_site(lu, host):                                enqueue(lu)                # Ensure the final redirected URL is considered "seen" too, so                # we don't refetch it on another pass.                seen.add(_normalize_url(r.url))                pages.append(page)    if time.time() >= deadline:        logger.warning(            "[crawler] hit deadline for %s after %d pages",            start_url,            len(pages),        )    except subprocess.TimeoutExpired:        logger.warning("SEO spider timed out after %ss for %s", SUBPROCESS_TIMEOUT_SECONDS, url)    # External link HEAD check. Only checks same unique URL once.    
external_links = set()    for p in pages:        if not p.get("is_html"):            continue        for link in p.get("links", []):            if not same_site(link["url"], host):                external_links.add(link["url"])    external_link_status = {}    if external_links and time.time() < deadline:        with ThreadPoolExecutor(max_workers=CONCURRENCY) as ex:            futures = {ex.submit(head_status, session, u): u for u in external_links}            for f in as_completed(futures):                url = futures[f]                try:                    external_link_status[url] = f.result()                except Exception:                    external_link_status[url] = 0    return {        "start_url": start_url,        "host": host,        "pages": pages,        "external_link_status": external_link_status,        "sitemap_urls": sitemap_urls,        "robots": {            "url": robots_url,            "exists": robots_text is not None,            "raw": robots_text,            "references_sitemap": bool(                robots_text                and any(                    line.lower().startswith("sitemap:")                    for line in robots_text.splitlines()                )            ),        },    }def run_checks(crawl_result):    """Build a ctx dict and run every check. Returns the flat insight list."""    ctx = {        "start_url": crawl_result["start_url"],        "host": crawl_result["host"],        "pages": crawl_result["pages"],        "html_pages": [p for p in crawl_result["pages"] if p.get("is_html")],        "status_map": {p["url"]: p["status"] for p in crawl_result["pages"]},        "external_link_status": crawl_result["external_link_status"],        "sitemap_urls": crawl_result["sitemap_urls"],        "robots": crawl_result["robots"],    }    insights = []    for fn in checks.ALL_CHECKS:        try:            insights.extend(fn(ctx))        except Exception:            logger.exception("[crawler] check %s failed", fn.__name__)    return insightsdef _write_debug_output(crawl_result):    host = crawl_result["host"]    path = _output_path(host)    try:        os.makedirs(os.path.dirname(path), exist_ok=True)        with open(path, "w") as f:            for p in crawl_result["pages"]:                # Strip bulky fields from the debug file.                snapshot = {k: v for k, v in p.items() if k != "headers"}                f.write(json.dumps(snapshot, default=str) + "\n")    except OSError:        logger.exception("[crawler] failed writing debug output to %s", path)def run_seo_spider(url):    """Crawl `url`, write debug output, return list of insight dicts."""    start = time.time()    logger.info("[crawler] starting %s", url)    result = crawl(url)    insights = run_checks(result)    _write_debug_output(result)    logger.info(        "[crawler] done %s - %d pages, %d insights, %.1fs",        url,        len(result["pages"]),        len(insights),        time.time() - start,    )    return insights
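
End to end, the model mixin's crawl_site() is now a thin wrapper around this runner; a hedged sketch of manual use from a Django shell (assumes a Property row already exists):

    from properties.models import Property

    p = Property.objects.first()
    p.crawl_site()                  # crawls in-process, saves crawler_insights
    print(len(p.crawler_insights))  # flat list of {url, issue, item, type, severity}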
deleted crawler/settings.py
@@ -1,6 +0,0 @@
BOT_NAME = 'status'
SPIDER_MODULES = ['crawler.spiders']
NEWSPIDER_MODULE = 'crawler.spiders'
USER_AGENT = 'status (+https://status.bythewood.me)'
ROBOTSTXT_OBEY = True
CONCURRENT_REQUESTS = 2
deleted crawler/spiders/__init__.py
@@ -1,4 +0,0 @@
# This package will contain the spiders of your Scrapy project
#
# Please refer to the documentation for information on how to create and manage
# your spiders.
deleted crawler/spiders/seo_spider.py
@@ -1,104 +0,0 @@
from scrapy.linkextractors import LinkExtractor
from scrapy.spiders import CrawlSpider, Rule
from w3lib.url import url_query_cleaner


def process_links(links):
    for link in links:
        link.url = url_query_cleaner(link.url)
        yield link


class SEOSpider(CrawlSpider):
    name = 'seo_spider'
    handle_httpstatus_list = [200, 301, 302, 303, 307, 400, 401, 403, 404, 500]

    rules = (
        # Rule(
        #     LinkExtractor(
        #         allow_domains=config['allow_domains'],
        #     ),
        #     callback='parse_local',
        #     follow=True,
        # ),
        # Rule(
        #     LinkExtractor(),
        #     callback='parse_external',
        # ),
        Rule(
            LinkExtractor(),
            callback='parse_local',
            follow=True,
            process_links=process_links,
        ),
    )

    def __init__(self, url, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.start_urls = [url]
        self.allowed_domains = [url.split('/')[2]]

    def parse_local(self, response):
        content_type = response.headers.get('Content-Type', b'').decode("utf-8")
        if response.status != 200:
            return {
                'url': response.url,
                'status': response.status,
                'type': 'local',
                'content_type': content_type,
                'title': '',
                'description': '',
                'canonical': '',
                'og_title': '',
                'og_description': '',
                'og_image': '',
                'og_url': '',
                'h1': '',
            }
        if "text/html" not in content_type:
            return {
                'url': response.url,
                'status': response.status,
                'type': 'local',
                'content_type': content_type,
                'title': '',
                'description': '',
                'canonical': '',
                'og_title': '',
                'og_description': '',
                'og_image': '',
                'og_url': '',
                'h1': '',
            }
        return {
            'url': response.url,
            'status': response.status,
            'type': 'local',
            'content_type': content_type,
            'title': response.xpath('normalize-space(//title)').get(),
            'description': response.xpath('normalize-space(//meta[@name="description"]/@content)').get(),
            'canonical': response.xpath('normalize-space(//link[@rel="canonical"]/@href)').get(),
            'og_title': response.xpath('normalize-space(//meta[@property="og:title"]/@content)').get(),
            'og_description': response.xpath('normalize-space(//meta[@property="og:description"]/@content)').get(),
            'og_image': response.xpath('normalize-space(//meta[@property="og:image"]/@content)').get(),
            'og_url': response.xpath('normalize-space(//meta[@property="og:url"]/@content)').get(),
            'h1': response.xpath('normalize-space(//h1)').get(),
        }

    # def parse_external(self, response):
    #     if response.status != 200:
    #         return {
    #             'url': response.url,
    #             'status': response.status,
    #             'type': 'external',
    #         }
    #     return {
    #         'url': response.url,
    #         'status': response.status,
    #         'type': 'external',
    #         'content_type': response.headers.get('Content-Type', b'').decode("utf-8"),
    #     }
modified properties/management/commands/scheduler.py
@@ -46,9 +46,10 @@ class Command(BaseCommand):

    def queue_process(self):
        # Cap on join() so a wedged lighthouse/crawler can't freeze the queue
-        # indefinitely. Must exceed status.lighthouse.SUBPROCESS_TIMEOUT_SECONDS
-        # so a normal slow run still completes inside the window.
-        JOIN_TIMEOUT = 300
+        # indefinitely. Must exceed both status.lighthouse.SUBPROCESS_TIMEOUT_SECONDS
+        # (180s) and crawler.fetcher.CRAWL_DEADLINE_SECONDS (540s) so a normal
+        # slow run still completes inside the window.
+        JOIN_TIMEOUT = 900
        while True:
            if not q.empty():
                threads = []
modified properties/models.py
@@ -1,7 +1,5 @@
 import re
 import uuid
-import os
-import json
 import logging

 import requests
@@ -11,7 +9,6 @@ from django.db import models, transaction
 from django.template.loader import render_to_string
 from django.utils import timezone
 from django.utils.functional import cached_property
-from django.conf import settings

 from crawler.runner import run_seo_spider
 from status.lighthouse import (
@@ -208,36 +205,8 @@ class AlertsMixin:


 class CrawlerMixin:
-    @cached_property
-    def get_crawl_output(self):
-        """
-        This will fetch crawler output in the JSON format from the folders:
-
-        - DEBUG == True: crawler_output/
-        - DEBUG == False: /data/crawler_output/
-
-        The filename in the folder is the site URL `self.url.split("/")[2] + ".json"`.
-
-        Need to parse every line individually to get the data.
-        """
-        if settings.DEBUG:
-            path = "crawler_output/"
-        else:
-            path = "/data/crawler_output/"
-        try:
-            with open(os.path.join(path, self.url.split("/")[2] + ".json")) as f:
-                data = []
-                for line in f:
-                    data.append(json.loads(line))
-                return data
-        except FileNotFoundError:
-            return []
-
     def get_next_run_at_crawl(self):
-        """
-        Should check weekly, users can recrawl whenever they want.
-        """
+        """Weekly crawl by default; users can trigger a recrawl anytime."""
         return timezone.now() + timezone.timedelta(days=7)

     def should_check_crawl(self):
@@ -248,135 +217,10 @@ class CrawlerMixin:
             return True
         return self.next_run_at_crawler <= now

-    def parse_page(self, page, duplicates=None):
-        insights = []
-        # Make sure the content type is text/html else skip
-        if "text/html" not in page.get("content_type", ""):
-            return insights
-        duplicates = duplicates or {"title": set(), "description": set(), "h1": set()}
-        # Make sure all pages have a title
-        if page['title'] == '':
-            logger.warning(f"Page {page['url']} has no title")
-            insights.append({
-                'url': page['url'],
-                'issue': 'Page has no title',
-                'type': 'seo',
-            })
-        # Make sure pages have a title between 30 and 60 characters
-        if len(page['title']) < 30 or len(page['title']) > 60:
-            logger.warning(f"Page {page['url']} has title of length {len(page['title'])}")
-            insights.append({
-                'url': page['url'],
-                'item': page['title'],
-                'issue': 'Page title is not between 30 and 60 characters',
-                'type': 'seo',
-            })
-        # Make sure pages have a unique title
-        if page['title'] in duplicates['title']:
-            logger.warning(f"Page {page['url']} has duplicate title")
-            insights.append({
-                'url': page['url'],
-                'item': page['title'],
-                'issue': 'Page has duplicate title',
-                'type': 'seo',
-            })
-        # Make sure pages have a description
-        if page['description'] == '':
-            logger.warning(f"Page {page['url']} has no description")
-            insights.append({
-                'url': page['url'],
-                'issue': 'Page has no description',
-                'type': 'seo',
-            })
-        # Make sure pages have a description between 70 and 160 characters
-        if len(page['description']) < 70 or len(page['description']) > 160:
-            logger.warning(f"Page {page['url']} has description of length {len(page['description'])}")
-            insights.append({
-                'url': page['url'],
-                'item': page['description'],
-                'issue': 'Page description is not between 70 and 160 characters',
-                'type': 'seo',
-            })
-        # Make sure pages have a unique description
-        if page['description'] in duplicates['description']:
-            logger.warning(f"Page {page['url']} has duplicate description")
-            insights.append({
-                'url': page['url'],
-                'item': page['description'],
-                'issue': 'Page has duplicate description',
-                'type': 'seo',
-            })
-        # Make sure pages have an h1
-        if page['h1'] == '':
-            logger.warning(f"Page {page['url']} has no h1")
-            insights.append({
-                'url': page['url'],
-                'issue': 'Page has no h1',
-                'type': 'seo',
-            })
-        # Make sure pages have an h1 between 20 and 70 characters
-        if len(page['h1']) < 20 or len(page['h1']) > 70:
-            logger.warning(f"Page {page['url']} has h1 of length {len(page['h1'])}")
-            insights.append({
-                'url': page['url'],
-                'item': page['h1'],
-                'issue': 'Page h1 is not between 20 and 70 characters',
-                'type': 'seo',
-            })
-        # Make sure pages have a unique h1
-        if page['h1'] in duplicates['h1']:
-            logger.warning(f"Page {page['url']} has duplicate h1")
-            insights.append({
-                'url': page['url'],
-                'item': page['h1'],
-                'issue': 'Page has duplicate h1',
-                'type': 'seo',
-            })
-        # Make sure pages have a canonical url
-        if page['canonical'] == '':
-            logger.warning(f"Page {page['url']} has no canonical url")
-            insights.append({
-                'url': page['url'],
-                'issue': 'Page has no canonical url',
-                'type': 'seo',
-            })
-        return insights
-
-    def parse_crawl(self):
-        # Pre-compute the set of values that appear on more than one page so the
-        # per-page uniqueness check is O(1) instead of scanning the full crawl.
-        duplicates = {"title": set(), "description": set(), "h1": set()}
-        for field in duplicates:
-            seen = set()
-            for p in self.get_crawl_output:
-                value = p.get(field, "")
-                if value in seen:
-                    duplicates[field].add(value)
-                else:
-                    seen.add(value)
-        insights = []
-        for page in self.get_crawl_output:
-            insights.extend(self.parse_page(page, duplicates=duplicates))
-        self.crawler_insights = insights
-        self.save(update_fields=['crawler_insights'])
-
     def crawl_site(self):
-        run_seo_spider(self.url)
-        self.parse_crawl()
+        insights = run_seo_spider(self.url)
+        self.crawler_insights = insights
+        self.save(update_fields=["crawler_insights"])


 class Property(CrawlerMixin, AlertsMixin, SecurityMixin, models.Model):
modified properties/templates/properties/property.html
@@ -208,25 +208,20 @@
 {% if property.crawler_insights %}
 <div class="container mt-4">
+  {% regroup property.crawler_insights|dictsort:"type" by type as insight_type_groups %}
+  {% for group in insight_type_groups %}
+  <h3 class="mt-4 text-capitalize">{{ group.grouper }} <small class="text-muted">({{ group.list|length }})</small></h3>
   <div class="row bg-dark text-white py-2 mb-2 rounded rounded-sm fw-bolder">
-    <div class="col-1">
-      Type
-    </div>
-    <div class="col-3">
-      URL
-    </div>
-    <div class="col-5">
-      Issue
-    </div>
-    <div class="col-3">
-      Item
-    </div>
+    <div class="col-2">Severity</div>
+    <div class="col-3">URL</div>
+    <div class="col-4">Issue</div>
+    <div class="col-3">Item</div>
   </div>
-  {% for insight in property.crawler_insights|dictsort:"url" %}
+  {% for insight in group.list|dictsort:"severity" %}
   <div class="row bg-light py-2 mb-2 rounded rounded-sm">
-    <div class="col-md-1 d-flex align-items-center">
-      <span class="badge bg-info">
-        {{ insight.type|upper }}
+    <div class="col-md-2 d-flex align-items-center">
+      <span class="badge {% if insight.severity == 'error' %}bg-danger{% elif insight.severity == 'warning' %}bg-warning text-dark{% else %}bg-info text-dark{% endif %}">
+        {{ insight.severity|upper }}
       </span>
     </div>
     <div class="col-md-3 text-truncate">
@@ -234,7 +229,7 @@
         {{ insight.url|url_path }}
       </a>
     </div>
-    <div class="col-md-5 text-truncate" {% if insight.issue %}data-bs-toggle="tooltip" data-bs-title="{{ insight.issue }}"{% endif %}>
+    <div class="col-md-4 text-truncate" {% if insight.issue %}data-bs-toggle="tooltip" data-bs-title="{{ insight.issue }}"{% endif %}>
      {{ insight.issue }}
    </div>
    <div class="col-md-3 text-truncate" {% if insight.item %}data-bs-toggle="tooltip" data-bs-title="{{ insight.item }}"{% endif %}>
@@ -242,6 +237,7 @@
     </div>
   </div>
   {% endfor %}
+  {% endfor %}
 </div>
 {% endif %}
 {% endblock %}
modified pyproject.toml
@@ -9,8 +9,8 @@ dependencies = [
     "tzdata",
     "uvicorn",
     "whitenoise",
-    "scrapy",
-    "w3lib",
+    "beautifulsoup4",
+    "lxml",
 ]

 [dependency-groups]
deleted scrapy.cfg
@@ -1,2 +0,0 @@
[settings]
default = crawler.settings
modified uv.lock
@@ -19,21 +19,16 @@ wheels = [
 ]

 [[package]]
-name = "attrs"
-version = "26.1.0"
+name = "beautifulsoup4"
+version = "4.14.3"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/9a/8e/82a0fe20a541c03148528be8cac2408564a6c9a0cc7e9171802bc1d26985/attrs-26.1.0.tar.gz", hash = "sha256:d03ceb89cb322a8fd706d4fb91940737b6642aa36998fe130a9bc96c985eff32", size = 952055, upload-time = "2026-03-19T14:22:25.026Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/64/b4/17d4b0b2a2dc85a6df63d1157e028ed19f90d4cd97c36717afef2bc2f395/attrs-26.1.0-py3-none-any.whl", hash = "sha256:c647aa4a12dfbad9333ca4e71fe62ddc36f4e63b2d260a37a8b83d2f043ac309", size = 67548, upload-time = "2026-03-19T14:22:23.645Z" },
+dependencies = [
+    { name = "soupsieve" },
+    { name = "typing-extensions" },
 ]

 [[package]]
-name = "automat"
-version = "25.4.16"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/e3/0f/d40bbe294bbf004d436a8bcbcfaadca8b5140d39ad0ad3d73d1a8ba15f14/automat-25.4.16.tar.gz", hash = "sha256:0017591a5477066e90d26b0e696ddc143baafd87b588cfac8100bc6be9634de0", size = 129977, upload-time = "2025-04-16T20:12:16.002Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/c3/b0/1c6a16426d389813b48d95e26898aff79abbde42ad353958ad95cc8c9b21/beautifulsoup4-4.14.3.tar.gz", hash = "sha256:6292b1c5186d356bba669ef9f7f051757099565ad9ada5dd630bd9de5fa7fb86", size = 627737, upload-time = "2025-11-30T15:08:26.084Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/02/ff/1175b0b7371e46244032d43a56862d0af455823b5280a50c63d99cc50f18/automat-25.4.16-py3-none-any.whl", hash = "sha256:04e9bce696a8d5671ee698005af6e5a9fa15354140a87f4870744604dcdd3ba1", size = 42842, upload-time = "2025-04-16T20:12:14.447Z" },
+    { url = "https://files.pythonhosted.org/packages/1a/39/47f9197bdd44df24d67ac8893641e16f386c984a0619ef2ee4c51fbbc019/beautifulsoup4-4.14.3-py3-none-any.whl", hash = "sha256:0918bfe44902e6ad8d57732ba310582e98da931428d231a5ecb9e7c703a735bb", size = 107721, upload-time = "2025-11-30T15:08:24.087Z" },
 ]

 [[package]]
@@ -89,88 +84,6 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa", size = 153684, upload-time = "2026-02-25T02:54:15.766Z" },
 ]
 
-[[package]]
-name = "cffi"
-version = "2.0.0"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
-    { name = "pycparser", marker = "implementation_name != 'PyPy'" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" }
-wheels = [
-    [… several dozen platform-specific wheel entries for cffi 2.0.0 (cp310 through cp314t; manylinux, musllinux, macOS, and Windows builds) elided here …]
-]
-
 [[package]]
 name = "charset-normalizer"
 version = "3.4.7"
@@ -297,93 +210,6 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
 ]
 
-[[package]]
-name = "constantly"
-version = "23.10.4"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/4d/6f/cb2a94494ff74aa9528a36c5b1422756330a75a8367bf20bd63171fc324d/constantly-23.10.4.tar.gz", hash = "sha256:aa92b70a33e2ac0bb33cd745eb61776594dc48764b06c35e0efd050b7f1c7cbd", size = 13300, upload-time = "2023-10-28T23:18:24.316Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/b8/40/c199d095151addf69efdb4b9ca3a4f20f70e20508d6222bffb9b76f58573/constantly-23.10.4-py3-none-any.whl", hash = "sha256:3fd9b4d1c3dc1ec9757f3c52aef7e53ad9323dbe39f51dfd4c43853b68dfa3f9", size = 13547, upload-time = "2023-10-28T23:18:23.038Z" },
-]
-
-[[package]]
-name = "cryptography"
-version = "46.0.7"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
-    { name = "cffi", marker = "platform_python_implementation != 'PyPy'" },
-    { name = "typing-extensions", marker = "python_full_version < '3.11'" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/47/93/ac8f3d5ff04d54bc814e961a43ae5b0b146154c89c61b47bb07557679b18/cryptography-46.0.7.tar.gz", hash = "sha256:e4cfd68c5f3e0bfdad0d38e023239b96a2fe84146481852dffbcca442c245aa5", size = 750652, upload-time = "2026-04-08T01:57:54.692Z" }
-wheels = [
-    [… several dozen platform-specific wheel entries for cryptography 46.0.7 (cp311/cp38 abi3, cp314t, and PyPy builds) elided here …]
-]
-
-[[package]]
-name = "cssselect"
-version = "1.4.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/ec/2e/cdfd8b01c37cbf4f9482eefd455853a3cf9c995029a46acd31dfaa9c1dd6/cssselect-1.4.0.tar.gz", hash = "sha256:fdaf0a1425e17dfe8c5cf66191d211b357cf7872ae8afc4c6762ddd8ac47fc92", size = 40589, upload-time = "2026-01-29T07:00:26.701Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/20/0c/7bb51e3acfafd16c48875bf3db03607674df16f5b6ef8d056586af7e2b8b/cssselect-1.4.0-py3-none-any.whl", hash = "sha256:c0ec5c0191c8ee39fcc8afc1540331d8b55b0183478c50e9c8a79d44dbceb1d8", size = 18540, upload-time = "2026-01-29T07:00:24.994Z" },
-]
-
-[[package]]
-name = "defusedxml"
-version = "0.7.1"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/0f/d5/c66da9b79e5bdb124974bfe172b4daf3c984ebd9c2a06e2b8a4dc7331c72/defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69", size = 75520, upload-time = "2021-03-08T10:59:26.269Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/07/6c/aa3f2f849e01cb6a001cd8554a88d4c77c5c1a31c95bdf1cf9301e6d9ef4/defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61", size = 25604, upload-time = "2021-03-08T10:59:24.45Z" },
-]
-
 [[package]]
 name = "django"
 version = "5.2.13"
@@ -418,15 +244,6 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/e9/47/3d61d611609764aa71a37f7037b870e7bfb22937366974c4fd46cada7bab/django-6.0.4-py3-none-any.whl", hash = "sha256:14359c809fc16e8f81fd2b59d7d348e4d2d799da6840b10522b6edf7b8afc1da", size = 8368342, upload-time = "2026-04-07T13:55:37.999Z" },
 ]
 
-[[package]]
-name = "filelock"
-version = "3.25.2"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/94/b8/00651a0f559862f3bb7d6f7477b192afe3f583cc5e26403b44e59a55ab34/filelock-3.25.2.tar.gz", hash = "sha256:b64ece2b38f4ca29dd3e810287aa8c48182bbecd1ae6e9ae126c9b35f1382694", size = 40480, upload-time = "2026-03-11T20:45:38.487Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/a4/a5/842ae8f0c08b61d6484b52f99a03510a3a72d23141942d216ebe81fefbce/filelock-3.25.2-py3-none-any.whl", hash = "sha256:ca8afb0da15f229774c9ad1b455ed96e85a81373065fb10446672f64444ddf70", size = 26759, upload-time = "2026-03-11T20:45:37.437Z" },
-]
-
 [[package]]
 name = "flake8"
 version = "7.3.0"
@@ -462,18 +279,6 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" },
 ]
 
-[[package]]
-name = "hyperlink"
-version = "21.0.0"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
-    { name = "idna" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/3a/51/1947bd81d75af87e3bb9e34593a4cf118115a8feb451ce7a69044ef1412e/hyperlink-21.0.0.tar.gz", hash = "sha256:427af957daa58bc909471c6c40f74c5450fa123dd093fc53efd2e91d2705a56b", size = 140743, upload-time = "2021-01-08T05:51:20.972Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/6e/aa/8caf6a0a3e62863cbb9dab27135660acba46903b703e224f14f447e57934/hyperlink-21.0.0-py2.py3-none-any.whl", hash = "sha256:e6b14c37ecb73e89c77d78cdb4c2cc8f3fb59a885c5b3f819ff4ed80f25af1b4", size = 74638, upload-time = "2021-01-08T05:51:22.906Z" },
-]
-
 [[package]]
 name = "idna"
 version = "3.11"
@@ -483,19 +288,6 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" },
 ]
 
-[[package]]
-name = "incremental"
-version = "24.11.0"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
-    { name = "packaging" },
-    { name = "tomli", marker = "python_full_version < '3.11'" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/ef/3c/82e84109e02c492f382c711c58a3dd91badda6d746def81a1465f74dc9f5/incremental-24.11.0.tar.gz", hash = "sha256:87d3480dbb083c1d736222511a8cf380012a8176c2456d01ef483242abbbcf8c", size = 24000, upload-time = "2025-11-28T02:30:17.861Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/1d/55/0f4df2a44053867ea9cbea73fc588b03c55605cd695cee0a3d86f0029cb2/incremental-24.11.0-py3-none-any.whl", hash = "sha256:a34450716b1c4341fe6676a0598e88a39e04189f4dce5dc96f656e040baa10b3", size = 21109, upload-time = "2025-11-28T02:30:16.442Z" },
-]
-
 [[package]]
 name = "isort"
 version = "8.0.1"
@@ -505,38 +297,6 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/3e/95/c7c34aa53c16353c56d0b802fba48d5f5caa2cdee7958acbcb795c830416/isort-8.0.1-py3-none-any.whl", hash = "sha256:28b89bc70f751b559aeca209e6120393d43fbe2490de0559662be7a9787e3d75", size = 89733, upload-time = "2026-02-28T10:08:19.466Z" },
 ]
 
-[[package]]
-name = "itemadapter"
-version = "0.13.1"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/52/47/4c75c5396941e653d5f864389964da6951e8f338c6739602dd778f62333e/itemadapter-0.13.1.tar.gz", hash = "sha256:fa139c7be2aa80f8874b2f23d165d5d4aa47c4b85c54ab530b567fd5f684f1b4", size = 32343, upload-time = "2026-01-08T17:56:38.863Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/db/a6/48805cef65b13644f1c23545dc525a7051581c84f5227efb1cd9a8ac9b02/itemadapter-0.13.1-py3-none-any.whl", hash = "sha256:f3c6b1babb4fb6cca4aa9061ef0b0c25c783c24a571c30e3667e7bcfea41815b", size = 18540, upload-time = "2026-01-08T17:56:37.29Z" },
-]
-
-[[package]]
-name = "itemloaders"
-version = "1.4.0"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
-    { name = "itemadapter" },
-    { name = "jmespath" },
-    { name = "parsel" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/05/bd/916f4fd26e14e6ad292b69693ccca4f192bcaf9f817ba7d6f7162dbbd835/itemloaders-1.4.0.tar.gz", hash = "sha256:b5338308a819098f43525b7afc5f7d46ba338ba4710f5ebe7a21b3b47bb29929", size = 29740, upload-time = "2026-01-29T12:50:38.04Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/ac/71/d9cd0e4c6a4aace991009fc47362ce9251be0fbcf2b6c533f918b31854d5/itemloaders-1.4.0-py3-none-any.whl", hash = "sha256:202b6f855299b4cadfdf78bb93a6cf977899e3c40c4c54524e120a444e65b5ac", size = 12188, upload-time = "2026-01-29T12:50:36.148Z" },
-]
-
-[[package]]
-name = "jmespath"
-version = "1.1.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/d3/59/322338183ecda247fb5d1763a6cbe46eff7222eaeebafd9fa65d4bf5cb11/jmespath-1.1.0.tar.gz", hash = "sha256:472c87d80f36026ae83c6ddd0f1d05d4e510134ed462851fd5f754c8c3cbb88d", size = 27377, upload-time = "2026-01-22T16:35:26.279Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/14/2f/967ba146e6d58cf6a652da73885f52fc68001525b4197effc174321d70b4/jmespath-1.1.0-py3-none-any.whl", hash = "sha256:a5663118de4908c91729bea0acadca56526eb2698e83de10cd116ae0f4e97c64", size = 20419, upload-time = "2026-01-22T16:35:24.919Z" },
-]
-
 [[package]]
 name = "lxml"
 version = "6.0.3"
@@ -682,22 +442,6 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" },
 ]
 
-[[package]]
-name = "parsel"
-version = "1.11.0"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
-    { name = "cssselect" },
-    { name = "jmespath" },
-    { name = "lxml" },
-    { name = "packaging" },
-    { name = "w3lib" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/91/c8/4ace3a5c61e39ca21734a5715d0e076eea6200dd8daea2a5b99452f5a0d6/parsel-1.11.0.tar.gz", hash = "sha256:5925fe087eb16fc404a7ed91e31e2c1e2a9b230da4b64f34d81358c0d0e27e88", size = 106849, upload-time = "2026-01-29T07:19:23.388Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/08/23/4e0dae5e5bee14aea26dba003a682e621563451a20f751ed985810f818b6/parsel-1.11.0-py3-none-any.whl", hash = "sha256:bda82575df1774dd64e1c1396163f3cadb3e383e0f8080d43d45fa6705355daa", size = 14176, upload-time = "2026-01-29T07:19:22.255Z" },
-]
-
 [[package]]
 name = "pathspec"
 version = "1.0.4"
@@ -716,36 +460,6 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/75/a6/a0a304dc33b49145b21f4808d763822111e67d1c3a32b524a1baf947b6e1/platformdirs-4.9.6-py3-none-any.whl", hash = "sha256:e61adb1d5e5cb3441b4b7710bea7e4c12250ca49439228cc1021c00dcfac0917", size = 21348, upload-time = "2026-04-09T00:04:09.463Z" },
 ]
 
-[[package]]
-name = "protego"
-version = "0.6.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/07/a7/955c422611d00a6e4a06d30b367ea9bb4fb09d48552e92aef1ba312493c7/protego-0.6.0.tar.gz", hash = "sha256:3466f41438421cf90008e98534d5fde47dc16a17482571d021143ac18b70ace9", size = 3137423, upload-time = "2026-01-29T10:58:28.267Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/d8/8c/f4dd590f48addf31398f78a78962eaa99eb4c87ac09c1927497032644731/protego-0.6.0-py3-none-any.whl", hash = "sha256:7210e6e06a8db839502baf1bfbcb810689a58e394d31408ef1ef9e4e3d79fc44", size = 10313, upload-time = "2026-01-29T10:58:26.748Z" },
-]
-
-[[package]]
-name = "pyasn1"
-version = "0.6.3"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/5c/5f/6583902b6f79b399c9c40674ac384fd9cd77805f9e6205075f828ef11fb2/pyasn1-0.6.3.tar.gz", hash = "sha256:697a8ecd6d98891189184ca1fa05d1bb00e2f84b5977c481452050549c8a72cf", size = 148685, upload-time = "2026-03-17T01:06:53.382Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/5d/a0/7d793dce3fa811fe047d6ae2431c672364b462850c6235ae306c0efd025f/pyasn1-0.6.3-py3-none-any.whl", hash = "sha256:a80184d120f0864a52a073acc6fc642847d0be408e7c7252f31390c0f4eadcde", size = 83997, upload-time = "2026-03-17T01:06:52.036Z" },
-]
-
-[[package]]
-name = "pyasn1-modules"
-version = "0.4.2"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
-    { name = "pyasn1" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/e9/e6/78ebbb10a8c8e4b61a59249394a4a594c1a7af95593dc933a349c8d00964/pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6", size = 307892, upload-time = "2025-03-28T02:41:22.17Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/47/8d/d529b5d697919ba8c11ad626e835d4039be708a35b0d22de83a269a6682c/pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a", size = 181259, upload-time = "2025-03-28T02:41:19.028Z" },
-]
-
 [[package]]
 name = "pycodestyle"
 version = "2.14.0"
@@ -755,24 +469,6 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/d7/27/a58ddaf8c588a3ef080db9d0b7e0b97215cee3a45df74f3a94dbbf5c893a/pycodestyle-2.14.0-py2.py3-none-any.whl", hash = "sha256:dd6bf7cb4ee77f8e016f9c8e74a35ddd9f67e1d5fd4184d86c3b98e07099f42d", size = 31594, upload-time = "2025-06-20T18:49:47.491Z" },
 ]
 
-[[package]]
-name = "pycparser"
-version = "3.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/1b/7d/92392ff7815c21062bea51aa7b87d45576f649f16458d78b7cf94b9ab2e6/pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29", size = 103492, upload-time = "2026-01-21T14:26:51.89Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992", size = 48172, upload-time = "2026-01-21T14:26:50.693Z" },
-]
-
-[[package]]
-name = "pydispatcher"
-version = "2.0.7"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/21/db/030d0700ae90d2f9d52c2f3c1f864881e19cef8cba3b0a08759c8494c19c/PyDispatcher-2.0.7.tar.gz", hash = "sha256:b777c6ad080dc1bad74a4c29d6a46914fa6701ac70f94b0d66fbcfde62f5be31", size = 38891, upload-time = "2023-02-17T20:11:13.106Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/66/0e/9ee7bc0b48ec45d93b302fa2d787830dca4dc454d31a237faa5815995988/PyDispatcher-2.0.7-py3-none-any.whl", hash = "sha256:96543bea04115ffde08f851e1d45cacbfd1ee866ac42127d9b476dc5aefa7de0", size = 12040, upload-time = "2023-02-17T20:11:11.991Z" },
-]
-
 [[package]]
 name = "pyflakes"
 version = "3.4.0"
@@ -782,25 +478,6 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/c2/2f/81d580a0fb83baeb066698975cb14a618bdbed7720678566f1b046a95fe8/pyflakes-3.4.0-py2.py3-none-any.whl", hash = "sha256:f742a7dbd0d9cb9ea41e9a24a918996e8170c799fa528688d40dd582c8265f4f", size = 63551, upload-time = "2025-06-20T18:45:26.937Z" },
 ]
 
-[[package]]
-name = "pyopenssl"
-version = "26.0.0"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
-    { name = "cryptography" },
-    { name = "typing-extensions", marker = "python_full_version < '3.13'" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/8e/11/a62e1d33b373da2b2c2cd9eb508147871c80f12b1cacde3c5d314922afdd/pyopenssl-26.0.0.tar.gz", hash = "sha256:f293934e52936f2e3413b89c6ce36df66a0b34ae1ea3a053b8c5020ff2f513fc", size = 185534, upload-time = "2026-03-15T14:28:26.353Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/fb/7d/d4f7d908fa8415571771b30669251d57c3cf313b36a856e6d7548ae01619/pyopenssl-26.0.0-py3-none-any.whl", hash = "sha256:df94d28498848b98cc1c0ffb8ef1e71e40210d3b0a8064c9d29571ed2904bf81", size = 57969, upload-time = "2026-03-15T14:28:24.864Z" },
-]
-
-[[package]]
-name = "pypydispatcher"
-version = "2.1.2"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/d5/7b/65f55513d3c769fd677f90032d8d8703e3dc17e88a41b6074d2177548bca/PyPyDispatcher-2.1.2.tar.gz", hash = "sha256:b6bec5dfcff9d2535bca2b23c80eae367b1ac250a645106948d315fcfa9130f2", size = 23224, upload-time = "2017-07-03T14:20:51.806Z" }
-
 [[package]]
 name = "pytokens"
 version = "0.4.1"
@@ -840,15 +517,6 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/c6/78/397db326746f0a342855b81216ae1f0a32965deccfd7c830a2dbc66d2483/pytokens-0.4.1-py3-none-any.whl", hash = "sha256:26cef14744a8385f35d0e095dc8b3a7583f6c953c2e3d269c7f82484bf5ad2de", size = 13729, upload-time = "2026-01-30T01:03:45.029Z" },
 ]
 
-[[package]]
-name = "queuelib"
-version = "1.9.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/76/f3/d80ab8c7c91b8c42d9a2aa4dd97a8be1321e7b26000c2675b75e641d958c/queuelib-1.9.0.tar.gz", hash = "sha256:b12fea79fd8c1dd23e212b1f3db58003b773949801d4f4e6f34d882467d4a192", size = 11729, upload-time = "2026-01-29T11:19:37.065Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/4a/1c/8df7b461497b42fcc1e7c44529201975ec77b0e1ebecd00df4b1f096c1d4/queuelib-1.9.0-py3-none-any.whl", hash = "sha256:c5fd3bebf2c924446fa94fca6b72e81168f79cf4c2a9143b8b26f266a423fcf3", size = 13585, upload-time = "2026-01-29T11:19:35.616Z" },
-]
-
 [[package]]
 name = "requests"
 version = "2.33.1"
@@ -865,59 +533,12 @@ wheels = [
 ]
 
 [[package]]
-name = "requests-file"
-version = "3.0.1"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
-    { name = "requests" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/3c/f8/5dc70102e4d337063452c82e1f0d95e39abfe67aa222ed8a5ddeb9df8de8/requests_file-3.0.1.tar.gz", hash = "sha256:f14243d7796c588f3521bd423c5dea2ee4cc730e54a3cac9574d78aca1272576", size = 6967, upload-time = "2025-10-20T18:56:42.279Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/e1/d5/de8f089119205a09da657ed4784c584ede8381a0ce6821212a6d4ca47054/requests_file-3.0.1-py2.py3-none-any.whl", hash = "sha256:d0f5eb94353986d998f80ac63c7f146a307728be051d4d1cd390dbdb59c10fa2", size = 4514, upload-time = "2025-10-20T18:56:41.184Z" },
-]
-
-[[package]]
-name = "scrapy"
-version = "2.15.0"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
-    { name = "cryptography" },
-    { name = "cssselect" },
-    { name = "defusedxml" },
-    { name = "itemadapter" },
-    { name = "itemloaders" },
-    { name = "lxml" },
-    { name = "packaging" },
-    { name = "parsel" },
-    { name = "protego" },
-    { name = "pydispatcher", marker = "platform_python_implementation == 'CPython'" },
-    { name = "pyopenssl" },
-    { name = "pypydispatcher", marker = "platform_python_implementation == 'PyPy'" },
-    { name = "queuelib" },
-    { name = "service-identity" },
-    { name = "tldextract" },
-    { name = "twisted" },
-    { name = "w3lib" },
-    { name = "zope-interface" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/b8/7b/48608f18bd0fe3a51124fe14cb48690dc0768aea52e66faf632467940509/scrapy-2.15.0.tar.gz", hash = "sha256:8072b42da2b54ccafaf188298c8da851197e83f6d4a19db324777ee2e204b70c", size = 1290115, upload-time = "2026-04-09T12:02:11.92Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/d4/f1/88c440a6b5a331e0f11d1d45442ae538bc52a26828bbe9755abff00afa48/scrapy-2.15.0-py3-none-any.whl", hash = "sha256:7d7a421fb75711c096cf8ad8b3d5090a4fc403e04abe4d38f86d612666fe07ac", size = 352486, upload-time = "2026-04-09T12:02:09.99Z" },
-]
-
-[[package]]
-name = "service-identity"
-version = "24.2.0"
+name = "soupsieve"
+version = "2.8.3"
 source = { registry = "https://pypi.org/simple" }
-dependencies = [
-    { name = "attrs" },
-    { name = "cryptography" },
-    { name = "pyasn1" },
-    { name = "pyasn1-modules" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/07/a5/dfc752b979067947261dbbf2543470c58efe735c3c1301dd870ef27830ee/service_identity-24.2.0.tar.gz", hash = "sha256:b8683ba13f0d39c6cd5d625d2c5f65421d6d707b013b375c355751557cbe8e09", size = 39245, upload-time = "2024-10-26T07:21:57.736Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/7b/ae/2d9c981590ed9999a0d91755b47fc74f74de286b0f5cee14c9269041e6c4/soupsieve-2.8.3.tar.gz", hash = "sha256:3267f1eeea4251fb42728b6dfb746edc9acaffc4a45b27e19450b676586e8349", size = 118627, upload-time = "2026-01-20T04:27:02.457Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/08/2c/ca6dd598b384bc1ce581e24aaae0f2bed4ccac57749d5c3befbb5e742081/service_identity-24.2.0-py3-none-any.whl", hash = "sha256:6b047fbd8a84fd0bb0d55ebce4031e400562b9196e1e0d3e0fe2b8a59f6d4a85", size = 11364, upload-time = "2024-10-26T07:21:56.302Z" },
+    { url = "https://files.pythonhosted.org/packages/46/2c/1462b1d0a634697ae9e55b3cecdcb64788e8b7d63f54d923fcd0bb140aed/soupsieve-2.8.3-py3-none-any.whl", hash = "sha256:ed64f2ba4eebeab06cc4962affce381647455978ffc1e36bb79a545b91f45a95", size = 37016, upload-time = "2026-01-20T04:27:01.012Z" },
 ]
 
 [[package]]
@@ -934,14 +555,14 @@ name = "status"
 version = "0.0.1"
 source = { virtual = "." }
 dependencies = [
+    { name = "beautifulsoup4" },
     { name = "django", version = "5.2.13", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.12'" },
     { name = "django", version = "6.0.4", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" },
     { name = "gunicorn" },
     { name = "lxml" },
+    { name = "requests" },
-    { name = "scrapy" },
     { name = "tzdata" },
     { name = "uvicorn" },
-    { name = "w3lib" },
     { name = "whitenoise" },
 ]
 
@@ -954,13 +575,13 @@ dev = [
 
 [package.metadata]
 requires-dist = [
+    { name = "beautifulsoup4" },
     { name = "django" },
     { name = "gunicorn" },
     { name = "lxml" },
+    { name = "requests" },
-    { name = "scrapy" },
     { name = "tzdata" },
     { name = "uvicorn" },
-    { name = "w3lib" },
     { name = "whitenoise" },
 ]
 
@@ -971,21 +592,6 @@ dev = [
     { name = "isort" },
 ]
 
-[[package]]
-name = "tldextract"
-version = "5.3.1"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
-    { name = "filelock" },
-    { name = "idna" },
-    { name = "requests" },
-    { name = "requests-file" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/65/7b/644fbbb49564a6cb124a8582013315a41148dba2f72209bba14a84242bf0/tldextract-5.3.1.tar.gz", hash = "sha256:a72756ca170b2510315076383ea2993478f7da6f897eef1f4a5400735d5057fb", size = 126105, upload-time = "2025-12-28T23:58:05.532Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/6d/42/0e49d6d0aac449ca71952ec5bae764af009754fcb2e76a5cc097543747b3/tldextract-5.3.1-py3-none-any.whl", hash = "sha256:6bfe36d518de569c572062b788e16a659ccaceffc486d243af0484e8ecf432d9", size = 105886, upload-time = "2025-12-28T23:58:04.071Z" },
-]
-
 [[package]]
 name = "tomli"
 version = "2.4.1"
@@ -1040,24 +646,6 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/7b/61/cceae43728b7de99d9b847560c262873a1f6c98202171fd5ed62640b494b/tomli-2.4.1-py3-none-any.whl", hash = "sha256:0d85819802132122da43cb86656f8d1f8c6587d54ae7dcaf30e90533028b49fe", size = 14583, upload-time = "2026-03-25T20:22:03.012Z" },
 ]
 
-[[package]]
-name = "twisted"
-version = "25.5.0"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
-    { name = "attrs" },
-    { name = "automat" },
-    { name = "constantly" },
-    { name = "hyperlink" },
-    { name = "incremental" },
-    { name = "typing-extensions" },
-    { name = "zope-interface" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/13/0f/82716ed849bf7ea4984c21385597c949944f0f9b428b5710f79d0afc084d/twisted-25.5.0.tar.gz", hash = "sha256:1deb272358cb6be1e3e8fc6f9c8b36f78eb0fa7c2233d2dbe11ec6fee04ea316", size = 3545725, upload-time = "2025-06-07T09:52:24.858Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/eb/66/ab7efd8941f0bc7b2bd555b0f0471bff77df4c88e0cc31120c82737fec77/twisted-25.5.0-py3-none-any.whl", hash = "sha256:8559f654d01a54a8c3efe66d533d43f383531ebf8d81d9f9ab4769d91ca15df7", size = 3204767, upload-time = "2025-06-07T09:52:21.428Z" },
-]
-
 [[package]]
 name = "typing-extensions"
 version = "4.15.0"
@@ -1099,15 +687,6 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/b7/23/a5bbd9600dd607411fa644c06ff4951bec3a4d82c4b852374024359c19c0/uvicorn-0.44.0-py3-none-any.whl", hash = "sha256:ce937c99a2cc70279556967274414c087888e8cec9f9c94644dfca11bd3ced89", size = 69425, upload-time = "2026-04-06T09:23:21.524Z" },
 ]
 
-[[package]]
-name = "w3lib"
-version = "2.4.1"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/c0/91/b2eb59c2cf243de5de1e91c963655df78c015509f51297685a8c86a27b8c/w3lib-2.4.1.tar.gz", hash = "sha256:8dd69ee39ff6398d708c793abc779c334a69bac7cee1cdf71736c669ed6be864", size = 48494, upload-time = "2026-03-20T09:50:27.477Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/66/c3/f8b216cbd742e5b84c40f045204c764ccb7524d2aeab021054ec69446b0a/w3lib-2.4.1-py3-none-any.whl", hash = "sha256:40930132907e68de906a5b89331ab8c8ff4f01bd35b5539ef7896017d814138d", size = 21695, upload-time = "2026-03-20T09:50:26.187Z" },
-]
-
 [[package]]
 name = "whitenoise"
 version = "6.12.0"
@@ -1116,41 +695,3 @@ sdist = { url = "https://files.pythonhosted.org/packages/cb/2a/55b3f3a4ec326cd07
 wheels = [
     { url = "https://files.pythonhosted.org/packages/db/eb/d5583a11486211f3ebd4b385545ae787f32363d453c19fffd81106c9c138/whitenoise-6.12.0-py3-none-any.whl", hash = "sha256:fc5e8c572e33ebf24795b47b6a7da8da3c00cff2349f5b04c02f28d0cc5a3cc2", size = 20302, upload-time = "2026-02-27T00:05:40.086Z" },
 ]
-
-[[package]]
-name = "zope-interface"
-version = "8.2"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/86/a4/77daa5ba398996d16bb43fc721599d27d03eae68fe3c799de1963c72e228/zope_interface-8.2.tar.gz", hash = "sha256:afb20c371a601d261b4f6edb53c3c418c249db1a9717b0baafc9a9bb39ba1224", size = 254019, upload-time = "2026-01-09T07:51:07.253Z" }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/b1/fa/6d9eb3a33998a3019d7eb4fa1802d01d6602fad90e0aea443e6e0fe8e49a/zope_interface-8.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:788c293f3165964ec6527b2d861072c68eef53425213f36d3893ebee89a89623", size = 207541, upload-time = "2026-01-09T08:04:55.378Z" },
-    { url = "https://files.pythonhosted.org/packages/19/8c/ad23c96fdee84cb1f768f6695dac187cc26e9038e01c69713ba0f7dc46ab/zope_interface-8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9a4e785097e741a1c953b3970ce28f2823bd63c00adc5d276f2981dd66c96c15", size = 208075, upload-time = "2026-01-09T08:04:57.118Z" },
-    { url = "https://files.pythonhosted.org/packages/dd/35/1bfd5fec31a307f0cf4065ee74ade63858ded3e2a71e248f1508118fcc95/zope_interface-8.2-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:16c69da19a06566664ddd4785f37cad5693a51d48df1515d264c20d005d322e2", size = 249528, upload-time = "2026-01-09T08:04:59.074Z" },
-    { url = "https://files.pythonhosted.org/packages/c6/3a/5d50b5fdb0f8226a2edff6adb7efdd3762ec95dff827dbab1761cb9a9e85/zope_interface-8.2-cp310-cp310-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c31acfa3d7cde48bec45701b0e1f4698daffc378f559bfb296837d8c834732f6", size = 254646, upload-time = "2026-01-09T08:05:00.964Z" },
-    { url = "https://files.pythonhosted.org/packages/2f/2a/ee7d675e151578eaf77828b8faac2b7ed9a69fead350bf5cf0e4afe7c73d/zope_interface-8.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0723507127f8269b8f3f22663168f717e9c9742107d1b6c9f419df561b71aa6d", size = 255083, upload-time = "2026-01-09T08:05:02.857Z" },
-    { url = "https://files.pythonhosted.org/packages/5d/07/99e2342f976c3700e142eddc01524e375a9e9078869a6885d9c72f3a3659/zope_interface-8.2-cp310-cp310-win_amd64.whl", hash = "sha256:3bf73a910bb27344def2d301a03329c559a79b308e1e584686b74171d736be4e", size = 211924, upload-time = "2026-01-09T08:05:04.702Z" },
-    { url = "https://files.pythonhosted.org/packages/98/97/9c2aa8caae79915ed64eb114e18816f178984c917aa9adf2a18345e4f2e5/zope_interface-8.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c65ade7ea85516e428651048489f5e689e695c79188761de8c622594d1e13322", size = 208081, upload-time = "2026-01-09T08:05:06.623Z" },
-    { url = "https://files.pythonhosted.org/packages/34/86/4e2fcb01a8f6780ac84923748e450af0805531f47c0956b83065c99ab543/zope_interface-8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a1ef4b43659e1348f35f38e7d1a6bbc1682efde239761f335ffc7e31e798b65b", size = 208522, upload-time = "2026-01-09T08:05:07.986Z" },
-    { url = "https://files.pythonhosted.org/packages/f6/eb/08e277da32ddcd4014922854096cf6dcb7081fad415892c2da1bedefbf02/zope_interface-8.2-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:dfc4f44e8de2ff4eba20af4f0a3ca42d3c43ab24a08e49ccd8558b7a4185b466", size = 255198, upload-time = "2026-01-09T08:05:09.532Z" },
-    { url = "https://files.pythonhosted.org/packages/ea/a1/b32484f3281a5dc83bc713ad61eca52c543735cdf204543172087a074a74/zope_interface-8.2-cp311-cp311-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8f094bfb49179ec5dc9981cb769af1275702bd64720ef94874d9e34da1390d4c", size = 259970, upload-time = "2026-01-09T08:05:11.477Z" },
-    { url = "https://files.pythonhosted.org/packages/f6/81/bca0e8ae1e487d4093a8a7cfed2118aa2d4758c8cfd66e59d2af09d71f1c/zope_interface-8.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d2bb8e7364e18f083bf6744ccf30433b2a5f236c39c95df8514e3c13007098ce", size = 261153, upload-time = "2026-01-09T08:05:13.402Z" },
-    { url = "https://files.pythonhosted.org/packages/40/1e/e3ff2a708011e56b10b271b038d4cb650a8ad5b7d24352fe2edf6d6b187a/zope_interface-8.2-cp311-cp311-win_amd64.whl", hash = "sha256:6f4b4dfcfdfaa9177a600bb31cebf711fdb8c8e9ed84f14c61c420c6aa398489", size = 212330, upload-time = "2026-01-09T08:05:15.267Z" },
-    { url = "https://files.pythonhosted.org/packages/e0/a0/1e1fabbd2e9c53ef92b69df6d14f4adc94ec25583b1380336905dc37e9a0/zope_interface-8.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:624b6787fc7c3e45fa401984f6add2c736b70a7506518c3b537ffaacc4b29d4c", size = 208785, upload-time = "2026-01-09T08:05:17.348Z" },
-    { url = "https://files.pythonhosted.org/packages/c3/2a/88d098a06975c722a192ef1fb7d623d1b57c6a6997cf01a7aabb45ab1970/zope_interface-8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bc9ded9e97a0ed17731d479596ed1071e53b18e6fdb2fc33af1e43f5fd2d3aaa", size = 208976, upload-time = "2026-01-09T08:05:18.792Z" },
-    { url = "https://files.pythonhosted.org/packages/e9/e8/757398549fdfd2f8c89f32c82ae4d2f0537ae2a5d2f21f4a2f711f5a059f/zope_interface-8.2-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:532367553e4420c80c0fc0cabcc2c74080d495573706f66723edee6eae53361d", size = 259411, upload-time = "2026-01-09T08:05:20.567Z" },
-    { url = "https://files.pythonhosted.org/packages/91/af/502601f0395ce84dff622f63cab47488657a04d0065547df42bee3a680ff/zope_interface-8.2-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2bf9cf275468bafa3c72688aad8cfcbe3d28ee792baf0b228a1b2d93bd1d541a", size = 264859, upload-time = "2026-01-09T08:05:22.234Z" },
-    { url = "https://files.pythonhosted.org/packages/89/0c/d2f765b9b4814a368a7c1b0ac23b68823c6789a732112668072fe596945d/zope_interface-8.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0009d2d3c02ea783045d7804da4fd016245e5c5de31a86cebba66dd6914d59a2", size = 264398, upload-time = "2026-01-09T08:05:23.853Z" },
-    { url = "https://files.pythonhosted.org/packages/4a/81/2f171fbc4222066957e6b9220c4fb9146792540102c37e6d94e5d14aad97/zope_interface-8.2-cp312-cp312-win_amd64.whl", hash = "sha256:845d14e580220ae4544bd4d7eb800f0b6034fe5585fc2536806e0a26c2ee6640", size = 212444, upload-time = "2026-01-09T08:05:25.148Z" },
-    { url = "https://files.pythonhosted.org/packages/66/47/45188fb101fa060b20e6090e500682398ab415e516a0c228fbb22bc7def2/zope_interface-8.2-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:6068322004a0158c80dfd4708dfb103a899635408c67c3b10e9acec4dbacefec", size = 209170, upload-time = "2026-01-09T08:05:26.616Z" },
-    { url = "https://files.pythonhosted.org/packages/09/03/f6b9336c03c2b48403c4eb73a1ec961d94dc2fb5354c583dfb5fa05fd41f/zope_interface-8.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2499de92e8275d0dd68f84425b3e19e9268cd1fa8507997900fa4175f157733c", size = 209229, upload-time = "2026-01-09T08:05:28.521Z" },
-    { url = "https://files.pythonhosted.org/packages/07/b1/65fe1dca708569f302ade02e6cdca309eab6752bc9f80105514f5b708651/zope_interface-8.2-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:f777e68c76208503609c83ca021a6864902b646530a1a39abb9ed310d1100664", size = 259393, upload-time = "2026-01-09T08:05:29.897Z" },
-    { url = "https://files.pythonhosted.org/packages/eb/a5/97b49cfceb6ed53d3dcfb3f3ebf24d83b5553194f0337fbbb3a9fec6cf78/zope_interface-8.2-cp313-cp313-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9b05a919fdb0ed6ea942e5a7800e09a8b6cdae6f98fee1bef1c9d1a3fc43aaa0", size = 264863, upload-time = "2026-01-09T08:05:31.501Z" },
-    { url = "https://files.pythonhosted.org/packages/cb/02/0b7a77292810efe3a0586a505b077ebafd5114e10c6e6e659f0c8e387e1f/zope_interface-8.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ccc62b5712dd7bd64cfba3ee63089fb11e840f5914b990033beeae3b2180b6cb", size = 264369, upload-time = "2026-01-09T08:05:32.941Z" },
-    { url = "https://files.pythonhosted.org/packages/fb/1d/0d1ff3846302ed1b5bbf659316d8084b30106770a5f346b7ff4e9f540f80/zope_interface-8.2-cp313-cp313-win_amd64.whl", hash = "sha256:34f877d1d3bb7565c494ed93828fa6417641ca26faf6e8f044e0d0d500807028", size = 212447, upload-time = "2026-01-09T08:05:35.064Z" },
-    { url = "https://files.pythonhosted.org/packages/1a/da/3c89de3917751446728b8898b4d53318bc2f8f6bf8196e150a063c59905e/zope_interface-8.2-cp314-cp314-macosx_10_9_x86_64.whl", hash = "sha256:46c7e4e8cbc698398a67e56ca985d19cb92365b4aafbeb6a712e8c101090f4cb", size = 209223, upload-time = "2026-01-09T08:05:36.449Z" },
-    { url = "https://files.pythonhosted.org/packages/00/7f/62d00ec53f0a6e5df0c984781e6f3999ed265129c4c3413df8128d1e0207/zope_interface-8.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a87fc7517f825a97ff4a4ca4c8a950593c59e0f8e7bfe1b6f898a38d5ba9f9cf", size = 209366, upload-time = "2026-01-09T08:05:38.197Z" },
-    { url = "https://files.pythonhosted.org/packages/ef/a2/f241986315174be8e00aabecfc2153cf8029c1327cab8ed53a9d979d7e08/zope_interface-8.2-cp314-cp314-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:ccf52f7d44d669203c2096c1a0c2c15d52e36b2e7a9413df50f48392c7d4d080", size = 261037, upload-time = "2026-01-09T08:05:39.568Z" },
-    { url = "https://files.pythonhosted.org/packages/02/cc/b321c51d6936ede296a1b8860cf173bee2928357fe1fff7f97234899173f/zope_interface-8.2-cp314-cp314-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:aae807efc7bd26302eb2fea05cd6de7d59269ed6ae23a6de1ee47add6de99b8c", size = 264219, upload-time = "2026-01-09T08:05:41.624Z" },
-    { url = "https://files.pythonhosted.org/packages/ab/fb/5f5e7b40a2f4efd873fe173624795ca47eaa22e29051270c981361b45209/zope_interface-8.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:05a0e42d6d830f547e114de2e7cd15750dc6c0c78f8138e6c5035e51ddfff37c", size = 264390, upload-time = "2026-01-09T08:05:42.936Z" },
-    { url = "https://files.pythonhosted.org/packages/f9/82/3f2bc594370bc3abd58e5f9085d263bf682a222f059ed46275cde0570810/zope_interface-8.2-cp314-cp314-win_amd64.whl", hash = "sha256:561ce42390bee90bae51cf1c012902a8033b2aaefbd0deed81e877562a116d48", size = 212585, upload-time = "2026-01-09T08:05:44.419Z" },
-]