# UUP dump link downloader — ESSENTIAL module
def download_uup_files(uup_data: Dict, work_dir: Path, edition: str):
    """Download every CAB/PSF file that belongs to *edition*.

    Filters the UUPdump file list down to entries tagged with the requested
    edition, then fetches them all in parallel into ``work_dir/uup_files``.

    Args:
        uup_data: Metadata dict from the UUPdump API (expects a "files" list).
        work_dir: Workspace root; files land in its "uup_files" subdirectory.
        edition: Edition name (e.g. "Pro") used to filter the file list.

    Returns:
        Path to the directory the files were downloaded into.

    Raises:
        ValueError: If no file entry matches the requested edition.
    """
    target_dir = work_dir / "uup_files"

    # Keep only entries tagged for this edition.
    matching = [
        entry
        for entry in uup_data.get("files", [])
        if edition in entry.get("editions", [])
    ]
    if not matching:
        raise ValueError(f"No files found for edition {edition}")

    # Build (url, local_path, size, sha1) tuples for the parallel downloader.
    queue = [
        (entry["url"], target_dir / entry["name"], entry.get("size"), entry.get("sha1"))
        for entry in matching
    ]

    print(f"Downloading {len(queue)} files for {edition}")
    download_files_parallel(queue, target_dir)
    return target_dir
# ------------------------------
# Helper functions
# ------------------------------

def run_cmd(cmd, cwd=None, check=True):
    """Run an external command and return its stripped stdout.

    Args:
        cmd: Command and arguments as a list of strings (no shell involved).
        cwd: Optional working directory for the child process.
        check: When True, a non-zero exit status raises RuntimeError.

    Returns:
        The command's stdout with surrounding whitespace stripped.

    Raises:
        RuntimeError: If *check* is True and the command exits non-zero.
    """
    print(f"[RUN] {' '.join(cmd)}")
    proc = subprocess.run(cmd, cwd=cwd, capture_output=True, text=True)
    if check and proc.returncode != 0:
        # Surface the child's stderr before raising so the failure is visible.
        print(f"ERROR: {proc.stderr}")
        raise RuntimeError(f"Command failed: {' '.join(cmd)}")
    return proc.stdout.strip()
# ------------------------------
# Configuration
# ------------------------------

# Default workspace where downloads and ISO build artifacts live.
DEFAULT_WORK_DIR = Path("UUP_workspace")

# UUPdump API endpoints; filled in via str.format with build/lang/edition.
UUP_METADATA_URL = "https://uupdump.net/get.php?id={build}&pack={lang}&edition={edition}"
UUP_FILE_LIST_URL = "https://uupdump.net/f/{build}/{lang}/files.json"
def download_file(url, dest_path, expected_size=None, expected_sha1=None):
    """Download *url* to *dest_path* with resume, size, and SHA-1 checks.

    NOTE(review): the original ``def`` line was lost in file corruption; this
    signature is reconstructed from the call site in download_files_parallel
    (``download_file(url, path, size, sha1)``) — confirm parameter names.

    Args:
        url: Source URL.
        dest_path: Local Path to write to.
        expected_size: Optional total byte count; used both to skip completed
            files and to verify the final download.
        expected_sha1: Optional hex SHA-1 digest to verify the final file.

    Returns:
        True on success (including when the file was already complete).

    Raises:
        ValueError: On size or SHA-1 mismatch after download.
    """
    # Resume support: if a partial file exists, request only the missing tail.
    headers = {}
    if dest_path.exists():
        existing_size = dest_path.stat().st_size
        if expected_size and existing_size == expected_size:
            print(f" [SKIP] {dest_path.name} already complete")
            return True
        headers['Range'] = f'bytes={existing_size}-'

    print(f" [DL] {url}")
    resp = requests.get(url, stream=True, headers=headers)
    resp.raise_for_status()

    # Only append when the server actually honored the Range request (206
    # Partial Content); a server that ignores Range returns 200 with the
    # full body, which must overwrite rather than append to the partial file.
    mode = 'ab' if ('Range' in headers and resp.status_code == 206) else 'wb'
    with open(dest_path, mode) as f:
        for chunk in resp.iter_content(chunk_size=8192):
            f.write(chunk)

    # Verify final size.
    if expected_size and dest_path.stat().st_size != expected_size:
        raise ValueError(f"Size mismatch for {dest_path.name}")

    # Verify SHA-1 over the whole file, streamed in 64 KiB chunks.
    if expected_sha1:
        sha1 = hashlib.sha1()
        with open(dest_path, 'rb') as f:
            for chunk in iter(lambda: f.read(65536), b''):
                sha1.update(chunk)
        if sha1.hexdigest() != expected_sha1:
            raise ValueError(f"SHA‑1 mismatch for {dest_path.name}")

    return True
# ------------------------------
# UUP operations
# ------------------------------

def fetch_uup_info(build: str, lang: str, edition: str) -> Dict:
    """Get the file list + metadata from the UUPdump API.

    Args:
        build: Build identifier (e.g. "22621.1").
        lang: Language pack code (e.g. "en-us").
        edition: Edition name — currently unused here (the file-list endpoint
            is not edition-specific); kept for interface stability.

    Returns:
        Parsed JSON dict, e.g.::

            {
                "files": [{"name": "file.cab", "size": 123,
                           "sha1": "...", "url": "..."}],
                "editions": ["Pro", "Home"],
                "build": "22621.1"
            }

    Raises:
        requests.HTTPError: If the API responds with an error status.
    """
    url = UUP_FILE_LIST_URL.format(build=build, lang=lang)
    print(f"Fetching file list from {url}")
    resp = requests.get(url)
    resp.raise_for_status()
    return resp.json()
def convert_to_iso(uup_dir: Path, edition: str, out_iso: Path, keep_temp=False):
    """
    Convert downloaded UUP files to a bootable ISO using external tools:
    - cabextract + wimlib-imagex to create install.wim/install.esd
    - genisoimage (mkisofs) to build the ISO

    Args:
        uup_dir: Directory holding the downloaded *.cab files.
        edition: Edition name, used as the captured image's name.
        out_iso: Output ISO path.
        keep_temp: When True, the intermediate ``iso_temp`` tree is kept.

    Raises:
        RuntimeError: If no WIM/ESD image is found, or a tool fails.
    """
    temp_dir = uup_dir.parent / "iso_temp"
    temp_dir.mkdir(exist_ok=True)

    # Step 1: Extract all CABs (some contain Windows PE, boot files)
    extract_dir = temp_dir / "extract"
    extract_dir.mkdir(exist_ok=True)
    for cab in uup_dir.glob("*.cab"):
        print(f"Extracting {cab.name}")
        run_cmd(["cabextract", "-d", str(extract_dir), str(cab)])

    # Step 2: Locate the main Windows image (install.wim or install.esd);
    # usually inside a cab named something like `windows10.0-kb...-x64.cab`.
    wim_file = next(extract_dir.glob("*.wim"), None)
    if not wim_file:
        # Maybe a compressed ESD instead of a WIM.
        wim_file = next(extract_dir.glob("*.esd"), None)
    if not wim_file:
        raise RuntimeError("No WIM/ESD found in extracted UUP files")

    # Step 3: Apply image to a directory (or export single edition)
    mount_dir = temp_dir / "mount"
    mount_dir.mkdir(exist_ok=True)
    # Get edition index (usually 1 for single edition, but check)
    edition_index = 1  # For simplicity; a real tool would parse `wimlib-imagex info`
    run_cmd(["wimlib-imagex", "apply", str(wim_file), str(edition_index), str(mount_dir)])

    # Step 4: Prepare ISO layout
    iso_root = temp_dir / "iso_root"
    iso_root.mkdir(exist_ok=True)

    # Copy boot files (UEFI/BIOS): bootmgr, bootmgr.efi, efi/, boot/.
    # For simplicity, assume the applied image has them in `Windows/Boot/`.
    # shutil.copytree replaces the former `cp -r` subprocess call (portable,
    # no external binary needed); dirs_exist_ok matches cp's merge behavior.
    boot_src = mount_dir / "Windows" / "Boot"
    if boot_src.exists():
        shutil.copytree(boot_src, iso_root / boot_src.name, dirs_exist_ok=True)

    # Copy the applied Windows image as install.wim (re-captured via wimlib).
    install_wim = iso_root / "sources" / "install.wim"
    # parents=True so this also works if intermediate directories are missing.
    install_wim.parent.mkdir(parents=True, exist_ok=True)
    run_cmd(["wimlib-imagex", "capture", str(mount_dir), str(install_wim),
             edition, "Windows UUP"])

    # Step 5: Create ISO with genisoimage (Linux) or mkisofs.
    # Dual boot entries: El Torito BIOS boot + UEFI alt-boot image.
    print(f"Creating ISO: {out_iso}")
    run_cmd([
        "genisoimage",
        "-b", "boot/etfsboot.com", "-no-emul-boot",
        "-boot-load-size", "8", "-boot-info-table",
        "-eltorito-alt-boot",
        "-e", "efi/microsoft/boot/efisys.bin", "-no-emul-boot",
        "-o", str(out_iso), str(iso_root),
    ])

    if not keep_temp:
        # Replaces the former `rm -rf`; ignore_errors matches -f semantics.
        shutil.rmtree(temp_dir, ignore_errors=True)

    print(f"ISO created: {out_iso}")
def download_files_parallel(file_list, download_dir, max_workers=8):
    """Download a list of (url, path, size, sha1) tuples in parallel.

    Args:
        file_list: Iterable of (url, dest_path, expected_size, expected_sha1)
            tuples, passed straight through to :func:`download_file`.
        download_dir: Directory the files are written into; created here if
            missing (nothing else in this module creates it).
        max_workers: Thread-pool size for concurrent downloads.

    Raises:
        Exception: Re-raises the first failure from any download worker.
    """
    # Ensure the destination exists before workers start opening files.
    Path(download_dir).mkdir(parents=True, exist_ok=True)

    with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
        futures = [
            executor.submit(download_file, url, path, size, sha1)
            for url, path, size, sha1 in file_list
        ]
        for future in concurrent.futures.as_completed(futures):
            future.result()  # raise if any download failed
import argparse
import concurrent.futures
import hashlib
import json
import os
import shutil
import subprocess
import sys
from pathlib import Path
from typing import Dict, List, Optional
from urllib.parse import urljoin

import requests