Support using an external command for idgames fetching.

yrriban 2025-07-30 19:45:51 -04:00
parent 00334120b0
commit 6aaa3ea15f
2 changed files with 38 additions and 20 deletions
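The diff below routes the two idgames API calls through a new fetch_url helper, which only shells out when a url_fetcher command is configured. As a minimal sketch of what that configuration amounts to — assuming _config behaves like a plain dict (the dcc.config format is not shown in this diff) and using a hypothetical curl-impersonate wrapper name:

    # Sketch only, not part of this commit. The fetcher can be any command on
    # PATH that takes a URL as its sole argument and writes the response body
    # to stdout; "curl_chrome116" is a hypothetical wrapper name.
    _config = {"url_fetcher": "curl_chrome116"}

    fetcher = _config.get("url_fetcher")  # None -> fetch_url falls back to urllib.request.urlopen

Leaving url_fetcher unset keeps the previous urllib-only behaviour.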


@@ -2,6 +2,8 @@ import dcc.config
 import io
 import json
 import pathlib
+import shutil
+import subprocess
 import urllib.request
 import zipfile
@@ -17,31 +19,46 @@ class Fetch(dcc.config.Base):
         if not parsed_args.id_or_name.isdigit():
             idgames_id = self.search_idgames(parsed_args.id_or_name)
-        with urllib.request.urlopen(
+        reply = self.fetch_url(
             "https://www.doomworld.com/idgames/api/" +
             "api.php?action=get&id={}&out=json".format(idgames_id)
-        ) as response:
-            reply = json.loads(response.read())
-            rpath = "/".join([
-                self.fetch_mirror,
-                reply["content"]["dir"],
-                reply["content"]["filename"]
-            ])
-            wad = reply["content"]["filename"][0:-4]
+        )
+        rpath = "/".join([
+            self.fetch_mirror,
+            reply["content"]["dir"],
+            reply["content"]["filename"]
+        ])
+        wad = reply["content"]["filename"][0:-4]
-            with urllib.request.urlopen(rpath) as response:
-                z = zipfile.ZipFile(io.BytesIO(response.read()))
-                z.extractall(path=self.pwads.joinpath(wad))
+        with urllib.request.urlopen(rpath) as response:
+            z = zipfile.ZipFile(io.BytesIO(response.read()))
+            z.extractall(path=self.pwads.joinpath(wad))
 
     # TODO: explicit error handling. Let users choose when >1 result.
     def search_idgames(self, wad):
-        with urllib.request.urlopen(
+        reply = self.fetch_url(
             "https://www.doomworld.com/idgames/api/" +
             "api.php?action=search&query={}&out=json".format(wad)
-        ) as response:
-            reply = json.loads(response.read())
-            files = reply["content"]["file"]
-            if type(files) is dict: # One result.
-                return files["id"]
-            else: # More than one. Zero will raise an error.
-                return files[0]["id"]
+        )
+        files = reply["content"]["file"]
+        if type(files) is dict: # One result.
+            return files["id"]
+        else: # More than one. Zero will raise an error.
+            return files[0]["id"]
+
+    # Ideally this would just be urllib.request.urlopen. However, Cloudflare
+    # can interfere with this. Therefore we support using a curl_impersonate
+    # binary instead.
+    def fetch_url(self, url):
+        fetcher = self._config.get("url_fetcher")
+        if fetcher is None:
+            with urllib.request.urlopen(url) as response:
+                return json.loads(response.read())
+        fetcher_path = shutil.which(fetcher)
+        if fetcher_path is None:
+            raise Exception(f"Fetch util {fetcher} not found on PATH.")
+        proc = subprocess.run([fetcher_path, url], capture_output=True, check=True)
+        return json.loads(proc.stdout)
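
As a rough usage sketch (not part of the commit): when url_fetcher is set, fetch_url resolves the command with shutil.which, runs it with the URL as its only argument, and parses whatever the command printed to stdout as JSON — something equivalent to the following, where the wrapper name and search query are hypothetical:

    import json
    import shutil
    import subprocess

    # Hypothetical curl-impersonate wrapper; any command that prints the HTTP
    # response body to stdout works the same way.
    fetcher_path = shutil.which("curl_chrome116")
    if fetcher_path is None:
        raise SystemExit("fetcher not found on PATH")

    url = ("https://www.doomworld.com/idgames/api/"
           "api.php?action=search&query=example&out=json")  # hypothetical query
    proc = subprocess.run([fetcher_path, url], capture_output=True, check=True)
    reply = json.loads(proc.stdout)
    print(reply["content"]["file"])  # dict for a single hit, list for several

Errors (a non-zero exit raising CalledProcessError via check=True, or non-JSON output) simply propagate, in line with the diff's TODO about explicit error handling.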