#!/usr/bin/env python3
import urllib3
import json

http = urllib3.PoolManager()

dl_dir = http.request("GET", "https://download.nvidia.com/XFree86/Linux-x86_64/")
# print(f"{dl_dir.status=}\n{dl_dir.data=}")
assert dl_dir.status < 400, "Error probably occurred"


def find_versions(dir_html: bytes) -> list[str]:
    # this algorithm obviously needs recursion because we need to discover the items
    # one after another in the directory listing
    def _rec(dir_html: bytes, start: int = 0, so_far: list[str] = []) -> list[str]:
        # NOTE: the original scanning body was lost in transit; this is a
        # reconstruction that assumes the listing contains plain <a href="<version>/"> anchors
        MATCH_START = b'<a href="'
        idx = dir_html.find(MATCH_START, start)
        if idx == -1:
            return so_far
        begin = idx + len(MATCH_START)
        end = dir_html.find(b'"', begin)
        if end == -1:
            return so_far
        href = dir_html[begin:end].decode().strip("/")
        # keep only entries that look like a dotted version number, e.g. "550.90.07"
        if href and all(part.isdigit() for part in href.split(".")):
            so_far.append(href)
        return _rec(dir_html, end, so_far)

    return _rec(dir_html)


versions = find_versions(dl_dir.data)


# NOTE: the original URL helpers were lost; these templates assume NVIDIA's usual
# layout under XFree86/Linux-x86_64/<version>/ and may need adjusting
def download_urls(version: str) -> list[str]:
    base = f"https://download.nvidia.com/XFree86/Linux-x86_64/{version}"
    return [f"{base}/NVIDIA-Linux-x86_64-{version}.run"]


def sha256_urls(version: str) -> list[str]:
    # candidate checksum locations; not every release publishes one (hence the
    # fallback loop in get_sha256 and the `is not None` filter at the bottom)
    return [url + ".sha256sum" for url in download_urls(version)]


def then_or_else(res, then_fn, err_fn):
    # NOTE: this helper's original name was lost; it dispatches on the HTTP status
    if res.status >= 400:
        return err_fn(res.status)
    return then_fn(res.data)


# small handlers meant to be passed as then_fn / err_fn
identity = lambda e: e


def _fail(msg: str | None = None):
    if msg:
        assert False, msg
    assert False


none_id = lambda _: None


def get_sha256(version: str) -> str | None:
    for url in sha256_urls(version):
        res = http.request("GET", url)
        # print(f"attempting: {url}")
        if res.status < 400:
            # checksum files look like "<sha256>  <filename>"; keep only the hash
            return res.data.decode().split()[0]
    return None


fetch_data = [(v, download_urls(v)[0], get_sha256(v)) for v in versions]
# reuse the last entry's data for a "latest" alias (assumes the listing ends with the newest version)
fetch_data.append(("latest", *fetch_data[-1][1:]))
# print(fetch_data)

# now print the JSON object
print(json.dumps(
    {version: {"url": dl_url, "sha256": sha256}
     for (version, dl_url, sha256) in fetch_data
     if sha256 is not None},
    indent=4,
))

# execution: fetch.py >nvidia_versions.json
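# Expected shape of nvidia_versions.json (illustrative only; the actual keys and
# values depend on what the directory listing and checksum files contain):
#
#   {
#       "<version>": {
#           "url": "https://download.nvidia.com/XFree86/Linux-x86_64/<version>/NVIDIA-Linux-x86_64-<version>.run",
#           "sha256": "<64-hex-digit checksum>"
#       },
#       ...,
#       "latest": { ...same fields as the last version listed... }
#   }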