# uupdump downloader — configuration and parallel download helpers
# ------------------------------
# Configuration
# ------------------------------

# Workspace directory for downloaded UUP files and intermediates.
DEFAULT_WORK_DIR = Path("UUP_workspace")

# uupdump.net endpoint returning metadata for a build/language/edition.
UUP_METADATA_URL = "https://uupdump.net/get.php?id={build}&pack={lang}&edition={edition}"

# uupdump.net endpoint returning the JSON file list for a build/language.
UUP_FILE_LIST_URL = "https://uupdump.net/f/{build}/{lang}/files.json"
def download_files_parallel(file_list, download_dir, max_workers=8):
    """Download a list of (url, path, size, sha1) tuples in parallel.

    Args:
        file_list: iterable of ``(url, local_path, size, sha1)`` tuples,
            each forwarded unchanged to ``download_file``.
        download_dir: target directory. NOTE(review): not referenced in this
            body — the paths in *file_list* already appear complete; kept
            for caller compatibility, confirm whether it can be dropped.
        max_workers: number of concurrent downloader threads.

    Raises:
        Whatever ``download_file`` raises for the first failed download
        (re-raised by ``Future.result()``).
    """
    with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
        futures = [
            executor.submit(download_file, url, path, size, sha1)
            for url, path, size, sha1 in file_list
        ]
        # Surface the first failure eagerly instead of waiting for all futures.
        for future in concurrent.futures.as_completed(futures):
            future.result()  # raise if any download failed
def download_uup_files(uup_data: Dict, work_dir: Path, edition: str) -> Path:
    """Download all required CAB/PSF files for the given edition.

    Args:
        uup_data: parsed uupdump metadata; ``uup_data["files"]`` is expected
            to be a list of dicts with at least ``name``, ``url`` and an
            ``editions`` list — TODO confirm against the fetcher.
        work_dir: workspace root; files are placed in ``work_dir / "uup_files"``.
        edition: edition name used to filter the file list.

    Returns:
        The ``work_dir / "uup_files"`` directory the files were downloaded to.

    Raises:
        ValueError: if no file in ``uup_data`` matches *edition*.
    """
    files = uup_data.get("files", [])
    edition_files = [f for f in files if edition in f.get("editions", [])]
    if not edition_files:
        raise ValueError(f"No files found for edition {edition}")

    # Compute the destination directory once instead of per file.
    download_dir = work_dir / "uup_files"
    download_list = [
        (f["url"], download_dir / f["name"], f.get("size"), f.get("sha1"))
        for f in edition_files
    ]

    print(f"Downloading {len(download_list)} files for {edition}")
    download_files_parallel(download_list, download_dir)
    return download_dir