diff --git a/.github/workflows/gh-pages.yml b/.github/workflows/gh-pages.yml index 10a2d730b..46fb46c52 100644 --- a/.github/workflows/gh-pages.yml +++ b/.github/workflows/gh-pages.yml @@ -79,7 +79,9 @@ jobs: - name: Extract the PyPi-enabled pyodide-lock.json run: | cp files/requirements.txt pyodide/my-requirements.txt - pyodide/run_docker --non-interactive node tools/makelock.mjs my-requirements.txt dist/pypi-pyodide-lock.json + cp tools/makelock.mjs pyodide/makelock.mjs + pyodide/run_docker --non-interactive node makelock.mjs my-requirements.txt dist/pypi-pyodide-lock.json + rm pyodide/makelock.mjs rm pyodide/my-requirements.txt - name: Clean up the pyodide build run: | diff --git a/pyodide b/pyodide index 2df7665d5..c8ab7d631 160000 --- a/pyodide +++ b/pyodide @@ -1 +1 @@ -Subproject commit 2df7665d54e4d731af2491e75564981fea45168c +Subproject commit c8ab7d6313b312242689fb27c64398c2da3d9022 diff --git a/tools/makelock.mjs b/tools/makelock.mjs new file mode 100644 index 000000000..bc2583f64 --- /dev/null +++ b/tools/makelock.mjs @@ -0,0 +1,32 @@ +import { readFileSync, writeFileSync } from "fs"; +import { argv } from "process"; + +import { loadPyodide } from "./dist/pyodide.mjs"; + +const [_node_path, _script_path, requirements_path, new_lockfile_path] = argv; + +const requirements = readFileSync(requirements_path, { encoding: 'utf8' }); + +const py = await loadPyodide({ packages: ["micropip"] }); + +await py.runPythonAsync(` +import micropip + +micropip.set_index_urls([ + # TODO: use a locally-hosted index with the fresh wheels + "https://lab.climet.eu/main/pypa/simple/{package_name}/", + "https://pypi.org/pypi/{package_name}/json", +]) + +await micropip.install([ + r for r in """${requirements}""".splitlines() + if len(r) > 0 and not r.startswith('#') +], verbose=True) + +with open("/pyodide-lock.json", "w") as f: + f.write(micropip.freeze()) +`); + +const lock = py.FS.readFile("/pyodide-lock.json", { encoding: 'utf8' }); + +writeFileSync(new_lockfile_path, lock);