libretro: make update_core.py script run concurrently
commit 5dd47333af
parent 8df7516450
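In brief: the update script previously fetched each core's repository hash in a serial for loop inside get_repo_hashes(). This commit extracts the per-core work into a helper and fans it out over a ThreadPoolExecutor, with the worker count read from the GET_REPO_THREADS environment variable (defaulting to 8). The hunks below show the change; a runnable sketch of the executor pattern follows the third hunk.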
@@ -2,12 +2,15 @@
 #!nix-shell -I nixpkgs=../../../../ -i python3 -p "python3.withPackages (ps: with ps; [ requests nix-prefetch-github ])" -p "git"
 
 import json
-import sys
+import os
 import subprocess
+import sys
 from pathlib import Path
+from concurrent.futures import ThreadPoolExecutor
 
 SCRIPT_PATH = Path(__file__).absolute().parent
 HASHES_PATH = SCRIPT_PATH / "hashes.json"
+GET_REPO_THREADS = int(os.environ.get("GET_REPO_THREADS", 8))
 CORES = {
     "atari800": {"repo": "libretro-atari800"},
     "beetle-gba": {"repo": "beetle-gba-libretro"},
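The new GET_REPO_THREADS constant is why os is now imported: the worker count comes from the environment with a fallback of 8. A minimal sketch of that idiom in isolation (int() happily accepts both the int default and the string an environment variable would yield):

import os

# Unset: get() returns the int 8 and int(8) is a no-op.
# Set (e.g. GET_REPO_THREADS=16): get() returns the string "16", which int() parses.
GET_REPO_THREADS = int(os.environ.get("GET_REPO_THREADS", 8))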
@@ -27,7 +30,7 @@ CORES = {
     "bsnes": {"repo": "bsnes-libretro"},
     "bsnes-hd": {"repo": "bsnes-hd", "owner": "DerKoun"},
     "bsnes-mercury": {"repo": "bsnes-mercury"},
-    "citra": { "repo": "citra", "fetch_submodules": True },
+    "citra": {"repo": "citra", "fetch_submodules": True},
     "desmume": {"repo": "desmume"},
     "desmume2015": {"repo": "desmume2015"},
     "dolphin": {"repo": "dolphin"},
@@ -141,16 +144,23 @@ def get_repo_hash(fetcher="fetchFromGitHub", **kwargs):
         raise ValueError(f"Unsupported fetcher: {fetcher}")
 
 
-def get_repo_hashes(cores_to_update=[]):
+def get_repo_hashes(cores={}):
+    def get_repo_hash_from_core_def(core_def):
+        core, repo = core_def
+        info(f"Getting repo hash for '{core}'...")
+        result = core, get_repo_hash(**repo)
+        info(f"Got repo hash for '{core}'!")
+        return result
+
     with open(HASHES_PATH) as f:
         repo_hashes = json.loads(f.read())
 
-    for core, repo in CORES.items():
-        if core in cores_to_update:
-            info(f"Getting repo hash for '{core}'...")
-            repo_hashes[core] = get_repo_hash(**repo)
-        else:
-            info(f"Skipping '{core}'...")
+    info(f"Running with {GET_REPO_THREADS} threads!")
+    with ThreadPoolExecutor(max_workers=GET_REPO_THREADS) as executor:
+        new_repo_hashes = executor.map(get_repo_hash_from_core_def, cores.items())
+
+    for core, repo in new_repo_hashes:
+        repo_hashes[core] = repo
 
     return repo_hashes
 
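The heart of the change is ThreadPoolExecutor.map: the per-core work is wrapped in a one-argument helper and mapped over the (core, repo) pairs. Since get_repo_hash presumably spends its time in network and subprocess calls (the shebang pulls in requests and nix-prefetch-github), threads are a good fit despite the GIL. A self-contained sketch of the same pattern, with a hypothetical stand-in for get_repo_hash:

from concurrent.futures import ThreadPoolExecutor

def fetch_one(core_def):
    # Stand-in for the real worker: unpack one (core, repo) pair and
    # return (core, hash); the actual script calls get_repo_hash(**repo).
    core, repo = core_def
    return core, f"sha-of-{repo['repo']}"  # hypothetical hash value

cores = {
    "atari800": {"repo": "libretro-atari800"},
    "citra": {"repo": "citra", "fetch_submodules": True},
}

# map() fans the items out across the pool and yields results in input
# order, so collecting them into a dict is safe and deterministic.
with ThreadPoolExecutor(max_workers=8) as executor:
    results = dict(executor.map(fetch_one, cores.items()))

print(results)  # {'atari800': 'sha-of-libretro-atari800', 'citra': 'sha-of-citra'}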
@@ -164,7 +174,8 @@ def main():
     else:
         cores_to_update = CORES.keys()
 
-    repo_hashes = get_repo_hashes(cores_to_update)
+    cores = {core: repo for core, repo in CORES.items() if core in cores_to_update}
+    repo_hashes = get_repo_hashes(cores)
     info(f"Generating '{HASHES_PATH}'...")
     with open(HASHES_PATH, "w") as f:
         f.write(json.dumps(dict(sorted(repo_hashes.items())), indent=4))
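Because get_repo_hashes() now expects the core definitions themselves rather than a list of names, main() first narrows CORES with a dict comprehension. The filter in isolation, with a toy two-core table:

CORES = {"atari800": {"repo": "libretro-atari800"}, "dolphin": {"repo": "dolphin"}}
cores_to_update = ["dolphin"]

# Keep only the requested cores, preserving each core's full repo definition.
cores = {core: repo for core, repo in CORES.items() if core in cores_to_update}
assert cores == {"dolphin": {"repo": "dolphin"}}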