make forge and neoforge concurrent #63

Open · wants to merge 3 commits into base: main
Changes from all commits
17 changes: 17 additions & 0 deletions meta/common/__init__.py
@@ -86,3 +86,20 @@ def default_session():
    sess.headers.update({"User-Agent": "PrismLauncherMeta/1.0"})

    return sess


def remove_files(file_paths):
    for file_path in file_paths:
        try:
            if os.path.isfile(file_path):
                os.remove(file_path)
        except Exception as e:
            print(e)


def filehash(filename, hashtype, blocksize=65536):
    hashtype = hashtype()
    with open(filename, "rb") as f:
        for block in iter(lambda: f.read(blocksize), b""):
            hashtype.update(block)
    return hashtype.hexdigest()
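
For context, a minimal sketch of how these shared helpers would be called from one of the updater scripts, assuming the meta package is importable and using hypothetical file paths (not paths from this PR):

import hashlib

from meta.common import filehash, remove_files

# Hypothetical installer jar cached by a previous run.
jar = "upstream/forge/jars/example-installer.jar"

# filehash takes the hash constructor itself, not an instance.
sha1 = filehash(jar, hashlib.sha1)
sha256 = filehash(jar, hashlib.sha256)
print(sha1, sha256)

# remove_files skips paths that do not exist and only prints other errors.
remove_files(["upstream/forge/installer_info/example.json"])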
8 changes: 0 additions & 8 deletions meta/run/update_fabric.py
@@ -29,14 +29,6 @@
sess = default_session()


def filehash(filename, hashtype, blocksize=65536):
    h = hashtype()
    with open(filename, "rb") as f:
        for block in iter(lambda: f.read(blocksize), b""):
            h.update(block)
    return h.hexdigest()


def get_maven_url(maven_key, server, ext):
    parts = maven_key.split(":", 3)
    maven_ver_url = (
274 changes: 145 additions & 129 deletions meta/run/update_forge.py
@@ -2,6 +2,7 @@
Get the source files necessary for generating Forge versions
"""

import concurrent.futures
import copy
import hashlib
import json
@@ -16,7 +17,13 @@

from pydantic import ValidationError

from meta.common import upstream_path, ensure_upstream_dir, default_session
from meta.common import (
    upstream_path,
    ensure_upstream_dir,
    default_session,
    remove_files,
    filehash,
)
from meta.common.forge import (
    JARS_DIR,
    INSTALLER_INFO_DIR,
@@ -57,14 +64,6 @@ def eprint(*args, **kwargs):
    print(*args, file=sys.stderr, **kwargs)


def filehash(filename, hashtype, blocksize=65536):
    hashtype = hashtype()
    with open(filename, "rb") as f:
        for block in iter(lambda: f.read(blocksize), b""):
            hashtype.update(block)
    return hashtype.hexdigest()


def get_single_forge_files_manifest(longversion):
print(f"Getting Forge manifest for {longversion}")
path_thing = UPSTREAM_DIR + "/forge/files_manifests/%s.json" % longversion
Expand Down Expand Up @@ -140,6 +139,99 @@ def get_single_forge_files_manifest(longversion):
return ret_dict


def process_forge_version(version, jar_path):
    installer_info_path = (
        UPSTREAM_DIR + "/forge/installer_info/%s.json" % version.long_version
    )
    profile_path = (
        UPSTREAM_DIR + "/forge/installer_manifests/%s.json" % version.long_version
    )
    version_file_path = (
        UPSTREAM_DIR + "/forge/version_manifests/%s.json" % version.long_version
    )

    if not os.path.isfile(jar_path):
        remove_files([profile_path, installer_info_path])
    else:
        fileSha1 = filehash(jar_path, hashlib.sha1)
        try:
            rfile = sess.get(version.url() + ".sha1")
            rfile.raise_for_status()
            if fileSha1 != rfile.text.strip():
                remove_files([jar_path, profile_path, installer_info_path])
        except Exception as e:
            eprint("Failed to check sha1 %s" % version.url())
            eprint("Error is %s" % e)

    installer_refresh_required = not os.path.isfile(profile_path) or not os.path.isfile(
        installer_info_path
    )

    if installer_refresh_required:
        # grab the installer if it's not there
        if not os.path.isfile(jar_path):
            eprint("Downloading %s" % version.url())
            rfile = sess.get(version.url(), stream=True)
            rfile.raise_for_status()
            with open(jar_path, "wb") as f:
                for chunk in rfile.iter_content(chunk_size=128):
                    f.write(chunk)

    eprint("Processing %s" % version.url())
    # harvestables from the installer
    if not os.path.isfile(profile_path):
        print(jar_path)
        with zipfile.ZipFile(jar_path) as jar:
            with suppress(KeyError):
                with jar.open("version.json") as profile_zip_entry:
                    version_data = profile_zip_entry.read()

                    # Process: does it parse?
                    MojangVersion.parse_raw(version_data)

                    with open(version_file_path, "wb") as versionJsonFile:
                        versionJsonFile.write(version_data)
                        versionJsonFile.close()

            with jar.open("install_profile.json") as profile_zip_entry:
                install_profile_data = profile_zip_entry.read()

                # Process: does it parse?
                is_parsable = False
                exception = None
                try:
                    ForgeInstallerProfile.parse_raw(install_profile_data)
                    is_parsable = True
                except ValidationError as err:
                    exception = err
                try:
                    ForgeInstallerProfileV2.parse_raw(install_profile_data)
                    is_parsable = True
                except ValidationError as err:
                    exception = err

                if not is_parsable:
                    if version.is_supported():
                        raise exception
                    else:
                        eprint(
                            "Version %s is not supported and won't be generated later."
                            % version.long_version
                        )

                with open(profile_path, "wb") as profileFile:
                    profileFile.write(install_profile_data)
                    profileFile.close()

    # installer info v1
    if not os.path.isfile(installer_info_path):
        installer_info = InstallerInfo()
        installer_info.sha1hash = filehash(jar_path, hashlib.sha1)
        installer_info.sha256hash = filehash(jar_path, hashlib.sha256)
        installer_info.size = os.path.getsize(jar_path)
        installer_info.write(installer_info_path)


def main():
    # get the remote version list fragments
    r = sess.get(
@@ -264,129 +356,53 @@ def main():

print("Grabbing installers and dumping installer profiles...")
# get the installer jars - if needed - and get the installer profiles out of them
for key, entry in new_index.versions.items():
eprint("Updating Forge %s" % key)
if entry.mc_version is None:
eprint("Skipping %d with invalid MC version" % entry.build)
continue

version = ForgeVersion(entry)
if version.url() is None:
eprint("Skipping %d with no valid files" % version.build)
continue
if version.long_version in BAD_VERSIONS:
eprint(f"Skipping bad version {version.long_version}")
continue

jar_path = os.path.join(UPSTREAM_DIR, JARS_DIR, version.filename())

if version.uses_installer():
installer_info_path = (
UPSTREAM_DIR + "/forge/installer_info/%s.json" % version.long_version
)
profile_path = (
UPSTREAM_DIR
+ "/forge/installer_manifests/%s.json" % version.long_version
)
version_file_path = (
UPSTREAM_DIR + "/forge/version_manifests/%s.json" % version.long_version
)
with concurrent.futures.ThreadPoolExecutor() as executor:
for key, entry in new_index.versions.items():
eprint("Updating Forge %s" % key)
if entry.mc_version is None:
eprint("Skipping %d with invalid MC version" % entry.build)
continue

            installer_refresh_required = not os.path.isfile(
                profile_path
            ) or not os.path.isfile(installer_info_path)

            if installer_refresh_required:
                # grab the installer if it's not there
                if not os.path.isfile(jar_path):
                    eprint("Downloading %s" % version.url())
                    rfile = sess.get(version.url(), stream=True)
                    rfile.raise_for_status()
                    with open(jar_path, "wb") as f:
                        for chunk in rfile.iter_content(chunk_size=128):
                            f.write(chunk)

            eprint("Processing %s" % version.url())
            # harvestables from the installer
            if not os.path.isfile(profile_path):
                print(jar_path)
                with zipfile.ZipFile(jar_path) as jar:
                    with suppress(KeyError):
                        with jar.open("version.json") as profile_zip_entry:
                            version_data = profile_zip_entry.read()

                            # Process: does it parse?
                            MojangVersion.parse_raw(version_data)

                            with open(version_file_path, "wb") as versionJsonFile:
                                versionJsonFile.write(version_data)
                                versionJsonFile.close()

                    with jar.open("install_profile.json") as profile_zip_entry:
                        install_profile_data = profile_zip_entry.read()

                        # Process: does it parse?
                        is_parsable = False
                        exception = None
                        try:
                            ForgeInstallerProfile.parse_raw(install_profile_data)
                            is_parsable = True
                        except ValidationError as err:
                            exception = err
                        try:
                            ForgeInstallerProfileV2.parse_raw(install_profile_data)
                            is_parsable = True
                        except ValidationError as err:
                            exception = err

                        if not is_parsable:
                            if version.is_supported():
                                raise exception
                            else:
                                eprint(
                                    "Version %s is not supported and won't be generated later."
                                    % version.long_version
                                )

                        with open(profile_path, "wb") as profileFile:
                            profileFile.write(install_profile_data)
                            profileFile.close()

            # installer info v1
            if not os.path.isfile(installer_info_path):
                installer_info = InstallerInfo()
                installer_info.sha1hash = filehash(jar_path, hashlib.sha1)
                installer_info.sha256hash = filehash(jar_path, hashlib.sha256)
                installer_info.size = os.path.getsize(jar_path)
                installer_info.write(installer_info_path)
        else:
            # ignore the two versions without install manifests and jar mod class files
            # TODO: fix those versions?
            if version.mc_version_sane == "1.6.1":
            version = ForgeVersion(entry)
            if version.url() is None:
                eprint("Skipping %d with no valid files" % version.build)
                continue
            if version.long_version in BAD_VERSIONS:
                eprint(f"Skipping bad version {version.long_version}")
                continue

            jar_path = os.path.join(UPSTREAM_DIR, JARS_DIR, version.filename())

            # only gather legacy info if it's missing
            if not os.path.isfile(LEGACYINFO_PATH):
                # grab the jar/zip if it's not there
                if not os.path.isfile(jar_path):
                    rfile = sess.get(version.url(), stream=True)
                    rfile.raise_for_status()
                    with open(jar_path, "wb") as f:
                        for chunk in rfile.iter_content(chunk_size=128):
                            f.write(chunk)
                # find the latest timestamp in the zip file
                tstamp = datetime.fromtimestamp(0)
                with zipfile.ZipFile(jar_path) as jar:
                    for info in jar.infolist():
                        tstamp_new = datetime(*info.date_time)
                        if tstamp_new > tstamp:
                            tstamp = tstamp_new
                legacy_info = ForgeLegacyInfo()
                legacy_info.release_time = tstamp
                legacy_info.sha1 = filehash(jar_path, hashlib.sha1)
                legacy_info.sha256 = filehash(jar_path, hashlib.sha256)
                legacy_info.size = os.path.getsize(jar_path)
                legacy_info_list.number[key] = legacy_info
            if version.uses_installer():
                executor.submit(process_forge_version, version, jar_path)
            else:
                # ignore the two versions without install manifests and jar mod class files
                # TODO: fix those versions?
                if version.mc_version_sane == "1.6.1":
                    continue

                # only gather legacy info if it's missing
                if not os.path.isfile(LEGACYINFO_PATH):
                    # grab the jar/zip if it's not there
                    if not os.path.isfile(jar_path):
                        rfile = sess.get(version.url(), stream=True)
                        rfile.raise_for_status()
                        with open(jar_path, "wb") as f:
                            for chunk in rfile.iter_content(chunk_size=128):
                                f.write(chunk)
                    # find the latest timestamp in the zip file
                    tstamp = datetime.fromtimestamp(0)
                    with zipfile.ZipFile(jar_path) as jar:
                        for info in jar.infolist():
                            tstamp_new = datetime(*info.date_time)
                            if tstamp_new > tstamp:
                                tstamp = tstamp_new
                    legacy_info = ForgeLegacyInfo()
                    legacy_info.release_time = tstamp
                    legacy_info.sha1 = filehash(jar_path, hashlib.sha1)
                    legacy_info.sha256 = filehash(jar_path, hashlib.sha256)
                    legacy_info.size = os.path.getsize(jar_path)
                    legacy_info_list.number[key] = legacy_info

    # only write legacy info if it's missing
    if not os.path.isfile(LEGACYINFO_PATH):
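
One behaviour worth noting in the new executor loop: the futures returned by executor.submit(process_forge_version, version, jar_path) are not kept, so an exception raised inside a worker (a failed download, or a parse error for a supported version) is only stored on its Future and never re-raised in main(). Below is a minimal sketch, not the PR's code, of one way the futures could be collected and checked; it reuses process_forge_version and eprint from update_forge.py and assumes a hypothetical pending dict built by the same loop:

import concurrent.futures

# Hypothetical: (version, jar_path) pairs gathered by the loop in main(),
# keyed by the long version string.
pending = {}

with concurrent.futures.ThreadPoolExecutor() as executor:
    futures = {
        executor.submit(process_forge_version, version, jar_path): key
        for key, (version, jar_path) in pending.items()
    }
    for future in concurrent.futures.as_completed(futures):
        key = futures[future]
        try:
            future.result()  # re-raises anything the worker raised
        except Exception as exc:
            eprint("Processing %s failed: %s" % (key, exc))
            raise

Passing an explicit max_workers may also be worth considering, since each task performs network downloads and zip extraction.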