mirror of
https://github.com/MarioSpore/Grinch-AP.git
synced 2025-10-21 12:11:33 -06:00

## What is this fixing or adding?

Adds a large number of new options, including:

- Door Shuffle
- Sphere-based level scaling
- Key Item and Pokédex requirement options to reach the Elite Four
- Split Card Key option
- Dexsanity option that can be set to a percentage of Pokémon that will be checks
- Stonesanity: remove the stones from the Celadon Department Store and shuffle them into the item pool, replacing 4 of the 5 Moon Stone items
- Sleep Trap items option
- Randomize Move Types option
- Town Map Fly Location option, to unlock a flight location when finding/receiving the Town Map

Many enhancements have been made, including:

- The game allows you to continue your save file _from Pallet Town_ as a way to save warp back to the beginning of the game. The one-way drop from Diglett's Cave to north Route 2 that had been added to the randomizer has been removed.
- The client auto-hints some locations when you are able to see the item before you can obtain it (though it will only show "AP Item" if the item belongs to another player), including the Bike Shop, Oak's Aides, the Celadon Prize Corner, and the unchosen Fossil location.

Various bugs have been fixed, including:

- Route 13 wild Pokémon not correctly logically requiring Cut
- The vanilla TM/HM compatibility options erroneously granting compatibility for many TMs/HMs
- If an item that exists in multiple quantities in the item pool was chosen for one of the locations that are pre-filled with local items, it would continue placing that same item in the remaining locations as long as more copies of that item existed
- The `start_with` option for `randomize_pokedex` still shuffling a Pokédex into the item pool
- The obedience threshold levels being incorrect with 0-2 badges, with Pokémon up to level 30 obeying with 0-1 badges and up to level 10 with 2 badges
- Receiving a DeathLink trigger in the Safari Zone causing issues. Now your remaining steps are set to 0 instead of blacking out while you're in the Safari Zone.

Many location names have been changed, as location names are now automatically prefixed with their region name, and a large number of areas have been split into new regions as part of the overhaul to add Door Shuffle.
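For orientation, options like these are normally exposed to players through Archipelago's option classes. The sketch below is illustrative only: the class names, value ranges, and docstrings are assumptions for demonstration, not the actual definitions added by this PR.

```python
# Hypothetical sketch of how such options are typically declared for an
# Archipelago world, using the Choice/Range/Toggle base classes from Options.
# Names and values here are illustrative assumptions, not this PR's code.
from Options import Choice, Range, Toggle


class DoorShuffle(Choice):
    """How door connections between areas are shuffled."""
    display_name = "Door Shuffle"
    option_off = 0
    option_simple = 1
    option_full = 2
    default = 0


class Dexsanity(Range):
    """Percentage of Pokémon whose Pokédex entries are location checks."""
    display_name = "Dexsanity"
    range_start = 0
    range_end = 100
    default = 0


class TownMapFlyLocation(Toggle):
    """Unlock a fly destination when the Town Map is found or received."""
    display_name = "Town Map Fly Location"
```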
632 lines
24 KiB
Python
import base64
import datetime
import os
import platform
import shutil
import sys
import sysconfig
import typing
import warnings
import zipfile
import urllib.request
import io
import json
import threading
import subprocess

from collections.abc import Iterable
from hashlib import sha3_512
from pathlib import Path


# This is a bit jank. We need cx-Freeze to be able to run anything from this script, so install it
try:
    requirement = 'cx-Freeze>=6.15.2'
    import pkg_resources
    try:
        pkg_resources.require(requirement)
        install_cx_freeze = False
    except pkg_resources.ResolutionError:
        install_cx_freeze = True
except ImportError:
    install_cx_freeze = True
    pkg_resources = None  # type: ignore [assignment]

if install_cx_freeze:
    # check if pip is available
    try:
        import pip  # noqa: F401
    except ImportError:
        raise RuntimeError("pip not available. Please install pip.")
    # install and import cx_freeze
    if '--yes' not in sys.argv and '-y' not in sys.argv:
        input(f'Requirement {requirement} is not satisfied, press enter to install it')
    subprocess.call([sys.executable, '-m', 'pip', 'install', requirement, '--upgrade'])
    import pkg_resources

import cx_Freeze

# .build only exists if cx-Freeze is the right version, so we have to update/install that first before this line
import setuptools.command.build

if __name__ == "__main__":
    # need to run this early to import from Utils and Launcher
    # TODO: move stuff to not require this
    import ModuleUpdate
    ModuleUpdate.update(yes="--yes" in sys.argv or "-y" in sys.argv)
    ModuleUpdate.update_ran = False  # restore for later

    from worlds.LauncherComponents import components, icon_paths
    from Utils import version_tuple, is_windows, is_linux
    from Cython.Build import cythonize


# On Python < 3.10 LogicMixin is not currently supported.
non_apworlds: set = {
    "A Link to the Past",
    "Adventure",
    "ArchipIDLE",
    "Archipelago",
    "ChecksFinder",
    "Clique",
    "DLCQuest",
    "Final Fantasy",
    "Hylics 2",
    "Kingdom Hearts 2",
    "Lufia II Ancient Cave",
    "Meritous",
    "Ocarina of Time",
    "Overcooked! 2",
    "Raft",
    "Secret of Evermore",
    "Slay the Spire",
    "Starcraft 2 Wings of Liberty",
    "Sudoku",
    "Super Mario 64",
    "VVVVVV",
    "Wargroove",
    "Zillion",
}

# LogicMixin is broken before 3.10 import revamp
if sys.version_info < (3, 10):
    non_apworlds.add("Hollow Knight")


def download_SNI():
    print("Updating SNI")
    machine_to_go = {
        "x86_64": "amd64",
        "aarch64": "arm64",
        "armv7l": "arm"
    }
    platform_name = platform.system().lower()
    machine_name = platform.machine().lower()
    # force amd64 on macos until we have universal2 sni, otherwise resolve to GOARCH
    machine_name = "amd64" if platform_name == "darwin" else machine_to_go.get(machine_name, machine_name)
    with urllib.request.urlopen("https://api.github.com/repos/alttpo/sni/releases/latest") as request:
        data = json.load(request)
    files = data["assets"]

    source_url = None

    for file in files:
        download_url: str = file["browser_download_url"]
        machine_match = download_url.rsplit("-", 1)[1].split(".", 1)[0] == machine_name
        if platform_name in download_url and machine_match:
            # prefer "many" builds
            if "many" in download_url:
                source_url = download_url
                break
            source_url = download_url

    if source_url and source_url.endswith(".zip"):
        with urllib.request.urlopen(source_url) as download:
            with zipfile.ZipFile(io.BytesIO(download.read()), "r") as zf:
                for member in zf.infolist():
                    zf.extract(member, path="SNI")
        print(f"Downloaded SNI from {source_url}")

    elif source_url and (source_url.endswith(".tar.xz") or source_url.endswith(".tar.gz")):
        import tarfile
        mode = "r:xz" if source_url.endswith(".tar.xz") else "r:gz"
        with urllib.request.urlopen(source_url) as download:
            sni_dir = None
            with tarfile.open(fileobj=io.BytesIO(download.read()), mode=mode) as tf:
                for member in tf.getmembers():
                    if member.name.startswith("/") or "../" in member.name:
                        raise ValueError(f"Unexpected file '{member.name}' in {source_url}")
                    elif member.isdir() and not sni_dir:
                        sni_dir = member.name
                    elif member.isfile() and not sni_dir or not member.name.startswith(sni_dir):
                        raise ValueError(f"Expected folder before '{member.name}' in {source_url}")
                    elif member.isfile() and sni_dir:
                        tf.extract(member)
            # sadly SNI is in its own folder on non-windows, so we need to rename
            shutil.rmtree("SNI", True)
            os.rename(sni_dir, "SNI")
        print(f"Downloaded SNI from {source_url}")

    elif source_url:
        print(f"Don't know how to extract SNI from {source_url}")

    else:
        print(f"No SNI found for system spec {platform_name} {machine_name}")

signtool: typing.Optional[str]
if os.path.exists("X:/pw.txt"):
    print("Using signtool")
    with open("X:/pw.txt", encoding="utf-8-sig") as f:
        pw = f.read()
    signtool = r'signtool sign /f X:/_SITS_Zertifikat_.pfx /p "' + pw + \
               r'" /fd sha256 /tr http://timestamp.digicert.com/ '
else:
    signtool = None


build_platform = sysconfig.get_platform()
arch_folder = "exe.{platform}-{version}".format(platform=build_platform,
                                                version=sysconfig.get_python_version())
buildfolder = Path("build", arch_folder)
build_arch = build_platform.split('-')[-1] if '-' in build_platform else platform.machine()


# see Launcher.py on how to add scripts to setup.py
def resolve_icon(icon_name: str):
    base_path = icon_paths[icon_name]
    if is_windows:
        path, extension = os.path.splitext(base_path)
        ico_file = path + ".ico"
        assert os.path.exists(ico_file), f"ico counterpart of {base_path} should exist."
        return ico_file
    else:
        return base_path


exes = [
    cx_Freeze.Executable(
        script=f'{c.script_name}.py',
        target_name=c.frozen_name + (".exe" if is_windows else ""),
        icon=resolve_icon(c.icon),
        base="Win32GUI" if is_windows and not c.cli else None
    ) for c in components if c.script_name and c.frozen_name
]

extra_data = ["LICENSE", "data", "EnemizerCLI", "SNI"]
extra_libs = ["libssl.so", "libcrypto.so"] if is_linux else []


def remove_sprites_from_folder(folder):
    for file in os.listdir(folder):
        if file != ".gitignore":
            os.remove(folder / file)


def _threaded_hash(filepath):
    hasher = sha3_512()
    hasher.update(open(filepath, "rb").read())
    return base64.b85encode(hasher.digest()).decode()


# cx_Freeze's build command runs other commands. Override to accept --yes and store that.
class BuildCommand(setuptools.command.build.build):
    user_options = [
        ('yes', 'y', 'Answer "yes" to all questions.'),
    ]
    yes: bool
    last_yes: bool = False  # used by sub commands of build

    def initialize_options(self):
        super().initialize_options()
        type(self).last_yes = self.yes = False

    def finalize_options(self):
        super().finalize_options()
        type(self).last_yes = self.yes


# Override cx_Freeze's build_exe command for pre and post build steps
class BuildExeCommand(cx_Freeze.command.build_exe.BuildEXE):
    user_options = cx_Freeze.command.build_exe.BuildEXE.user_options + [
        ('yes', 'y', 'Answer "yes" to all questions.'),
        ('extra-data=', None, 'Additional files to add.'),
    ]
    yes: bool
    extra_data: Iterable  # [any] not available in 3.8
    extra_libs: Iterable  # work around broken include_files

    buildfolder: Path
    libfolder: Path
    library: Path
    buildtime: datetime.datetime

    def initialize_options(self):
        super().initialize_options()
        self.yes = BuildCommand.last_yes
        self.extra_data = []
        self.extra_libs = []

    def finalize_options(self):
        super().finalize_options()
        self.buildfolder = self.build_exe
        self.libfolder = Path(self.buildfolder, "lib")
        self.library = Path(self.libfolder, "library.zip")

    def installfile(self, path, subpath=None, keep_content: bool = False):
        folder = self.buildfolder
        if subpath:
            folder /= subpath
        print('copying', path, '->', folder)
        if path.is_dir():
            folder /= path.name
            if folder.is_dir() and not keep_content:
                shutil.rmtree(folder)
            shutil.copytree(path, folder, dirs_exist_ok=True)
        elif path.is_file():
            shutil.copy(path, folder)
        else:
            print('Warning,', path, 'not found')

    def create_manifest(self, create_hashes=False):
        # Since the setup is now split into components and the manifest is not,
        # it makes most sense to just remove the hashes for now. Not aware of anyone using them.
        hashes = {}
        manifestpath = os.path.join(self.buildfolder, "manifest.json")
        if create_hashes:
            from concurrent.futures import ThreadPoolExecutor
            pool = ThreadPoolExecutor()
            for dirpath, dirnames, filenames in os.walk(self.buildfolder):
                for filename in filenames:
                    path = os.path.join(dirpath, filename)
                    hashes[os.path.relpath(path, start=self.buildfolder)] = pool.submit(_threaded_hash, path)

        import json
        manifest = {
            "buildtime": self.buildtime.isoformat(sep=" ", timespec="seconds"),
            "hashes": {path: hash.result() for path, hash in hashes.items()},
            "version": version_tuple}

        json.dump(manifest, open(manifestpath, "wt"), indent=4)
        print("Created Manifest")

    def run(self):
        # start downloading sni asap
        sni_thread = threading.Thread(target=download_SNI, name="SNI Downloader")
        sni_thread.start()

        # pre-build steps
        print(f"Outputting to: {self.buildfolder}")
        os.makedirs(self.buildfolder, exist_ok=True)
        import ModuleUpdate
        ModuleUpdate.requirements_files.add(os.path.join("WebHostLib", "requirements.txt"))
        ModuleUpdate.update(yes=self.yes)

        # auto-build cython modules
        build_ext = self.distribution.get_command_obj("build_ext")
        build_ext.inplace = False
        self.run_command("build_ext")
        # find remains of previous in-place builds, try to delete and warn otherwise
        for path in build_ext.get_outputs():
            parts = os.path.split(path)[-1].split(".")
            pattern = parts[0] + ".*." + parts[-1]
            for match in Path().glob(pattern):
                try:
                    match.unlink()
                    print(f"Removed {match}")
                except Exception as ex:
                    warnings.warn(f"Could not delete old build output: {match}\n"
                                  f"{ex}\nPlease close all AP instances and delete manually.")

        # regular cx build
        self.buildtime = datetime.datetime.utcnow()
        super().run()

        # manually copy built modules to lib folder. cx_Freeze does not know they exist.
        for src in build_ext.get_outputs():
            print(f"copying {src} -> {self.libfolder}")
            shutil.copy(src, self.libfolder, follow_symlinks=False)

        # need to finish download before copying
        sni_thread.join()

        # include_files seems to not be done automatically. implement here
        for src, dst in self.include_files:
            print(f"copying {src} -> {self.buildfolder / dst}")
            shutil.copyfile(src, self.buildfolder / dst, follow_symlinks=False)

        # now that include_files is completely broken, run find_libs here
        for src, dst in find_libs(*self.extra_libs):
            print(f"copying {src} -> {self.buildfolder / dst}")
            shutil.copyfile(src, self.buildfolder / dst, follow_symlinks=False)

        # post build steps
        if is_windows:  # kivy_deps is win32 only, linux picks them up automatically
            from kivy_deps import sdl2, glew
            for folder in sdl2.dep_bins + glew.dep_bins:
                shutil.copytree(folder, self.libfolder, dirs_exist_ok=True)
                print(f"copying {folder} -> {self.libfolder}")

        for data in self.extra_data:
            self.installfile(Path(data))

        # kivy data files
        import kivy
        shutil.copytree(os.path.join(os.path.dirname(kivy.__file__), "data"),
                        self.buildfolder / "data",
                        dirs_exist_ok=True)

        os.makedirs(self.buildfolder / "Players" / "Templates", exist_ok=True)
        from Options import generate_yaml_templates
        from worlds.AutoWorld import AutoWorldRegister
        assert not non_apworlds - set(AutoWorldRegister.world_types), \
            f"Unknown world {non_apworlds - set(AutoWorldRegister.world_types)} designated for .apworld"
        folders_to_remove: typing.List[str] = []
        generate_yaml_templates(self.buildfolder / "Players" / "Templates", False)
        for worldname, worldtype in AutoWorldRegister.world_types.items():
            if worldname not in non_apworlds:
                file_name = os.path.split(os.path.dirname(worldtype.__file__))[1]
                world_directory = self.libfolder / "worlds" / file_name
                # this method creates an apworld that cannot be moved to a different OS or minor python version,
                # which should be ok
                with zipfile.ZipFile(self.libfolder / "worlds" / (file_name + ".apworld"), "x", zipfile.ZIP_DEFLATED,
                                     compresslevel=9) as zf:
                    for path in world_directory.rglob("*.*"):
                        relative_path = os.path.join(*path.parts[path.parts.index("worlds")+1:])
                        zf.write(path, relative_path)
                folders_to_remove.append(file_name)
                shutil.rmtree(world_directory)
        shutil.copyfile("meta.yaml", self.buildfolder / "Players" / "Templates" / "meta.yaml")
        # TODO: fix LttP options one day
        shutil.copyfile("playerSettings.yaml", self.buildfolder / "Players" / "Templates" / "A Link to the Past.yaml")
        try:
            from maseya import z3pr
        except ImportError:
            print("Maseya Palette Shuffle not found, skipping data files.")
        else:
            # maseya Palette Shuffle exists and needs its data files
            print("Maseya Palette Shuffle found, including data files...")
            file = z3pr.__file__
            self.installfile(Path(os.path.dirname(file)) / "data", keep_content=True)

        if signtool:
            for exe in self.distribution.executables:
                print(f"Signing {exe.target_name}")
                os.system(signtool + os.path.join(self.buildfolder, exe.target_name))
            print("Signing SNI")
            os.system(signtool + os.path.join(self.buildfolder, "SNI", "SNI.exe"))
            print("Signing OoT Utils")
            for exe_path in (("Compress", "Compress.exe"), ("Decompress", "Decompress.exe")):
                os.system(signtool + os.path.join(self.buildfolder, "lib", "worlds", "oot", "data", *exe_path))

        remove_sprites_from_folder(self.buildfolder / "data" / "sprites" / "alttpr")

        self.create_manifest()

        if is_windows:
            # Inno setup stuff
            with open("setup.ini", "w") as f:
                min_supported_windows = "6.2.9200" if sys.version_info > (3, 9) else "6.0.6000"
                f.write(f"[Data]\nsource_path={self.buildfolder}\nmin_windows={min_supported_windows}\n")
            with open("installdelete.iss", "w") as f:
                f.writelines("Type: filesandordirs; Name: \"{app}\\lib\\worlds\\"+world_directory+"\"\n"
                             for world_directory in folders_to_remove)
        else:
            # make sure extra programs are executable
            enemizer_exe = self.buildfolder / 'EnemizerCLI/EnemizerCLI.Core'
            sni_exe = self.buildfolder / 'SNI/sni'
            extra_exes = (enemizer_exe, sni_exe)
            for extra_exe in extra_exes:
                if extra_exe.is_file():
                    extra_exe.chmod(0o755)


class AppImageCommand(setuptools.Command):
    description = "build an app image from build output"
    user_options = [
        ("build-folder=", None, "Folder to convert to AppImage."),
        ("dist-file=", None, "AppImage output file."),
        ("app-dir=", None, "Folder to use for packaging."),
        ("app-icon=", None, "The icon to use for the AppImage."),
        ("app-exec=", None, "The application to run inside the image."),
        ("yes", "y", 'Answer "yes" to all questions.'),
    ]
    build_folder: typing.Optional[Path]
    dist_file: typing.Optional[Path]
    app_dir: typing.Optional[Path]
    app_name: str
    app_exec: typing.Optional[Path]
    app_icon: typing.Optional[Path]  # source file
    app_id: str  # lower case name, used for icon and .desktop
    yes: bool

    def write_desktop(self):
        assert self.app_dir, "Invalid app_dir"
        desktop_filename = self.app_dir / f"{self.app_id}.desktop"
        with open(desktop_filename, 'w', encoding="utf-8") as f:
            f.write("\n".join((
                "[Desktop Entry]",
                f'Name={self.app_name}',
                f'Exec={self.app_exec}',
                "Type=Application",
                "Categories=Game",
                f'Icon={self.app_id}',
                ''
            )))
        desktop_filename.chmod(0o755)

    def write_launcher(self, default_exe: Path):
        assert self.app_dir, "Invalid app_dir"
        launcher_filename = self.app_dir / "AppRun"
        with open(launcher_filename, 'w', encoding="utf-8") as f:
            f.write(f"""#!/bin/sh
exe="{default_exe}"
match="${{1#--executable=}}"
if [ "${{#match}}" -lt "${{#1}}" ]; then
    exe="$match"
    shift
elif [ "$1" = "-executable" ] || [ "$1" = "--executable" ]; then
    exe="$2"
    shift; shift
fi
tmp="${{exe#*/}}"
if [ ! "${{#tmp}}" -lt "${{#exe}}" ]; then
    exe="{default_exe.parent}/$exe"
fi
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$APPDIR/{default_exe.parent}/lib"
$APPDIR/$exe "$@"
""")
        launcher_filename.chmod(0o755)

    def install_icon(self, src: Path, name: typing.Optional[str] = None, symlink: typing.Optional[Path] = None):
        assert self.app_dir, "Invalid app_dir"
        try:
            from PIL import Image
        except ModuleNotFoundError:
            if not self.yes:
                input("Requirement PIL is not satisfied, press enter to install it")
            subprocess.call([sys.executable, '-m', 'pip', 'install', 'Pillow', '--upgrade'])
            from PIL import Image
        im = Image.open(src)
        res, _ = im.size

        if not name:
            name = src.stem
        ext = src.suffix
        dest_dir = Path(self.app_dir / f'usr/share/icons/hicolor/{res}x{res}/apps')
        dest_dir.mkdir(parents=True, exist_ok=True)
        dest_file = dest_dir / f'{name}{ext}'
        shutil.copy(src, dest_file)
        if symlink:
            symlink.symlink_to(dest_file.relative_to(symlink.parent))

    def initialize_options(self):
        self.build_folder = None
        self.app_dir = None
        self.app_name = self.distribution.metadata.name
        self.app_icon = self.distribution.executables[0].icon
        self.app_exec = Path('opt/{app_name}/{exe}'.format(
            app_name=self.distribution.metadata.name, exe=self.distribution.executables[0].target_name
        ))
        self.dist_file = Path("dist", "{app_name}_{app_version}_{platform}.AppImage".format(
            app_name=self.distribution.metadata.name, app_version=self.distribution.metadata.version,
            platform=sysconfig.get_platform()
        ))
        self.yes = False

    def finalize_options(self):
        if not self.app_dir:
            self.app_dir = self.build_folder.parent / "AppDir"
        self.app_id = self.app_name.lower()

    def run(self):
        self.dist_file.parent.mkdir(parents=True, exist_ok=True)
        if self.app_dir.is_dir():
            shutil.rmtree(self.app_dir)
        self.app_dir.mkdir(parents=True)
        opt_dir = self.app_dir / "opt" / self.distribution.metadata.name
        shutil.copytree(self.build_folder, opt_dir)
        root_icon = self.app_dir / f'{self.app_id}{self.app_icon.suffix}'
        self.install_icon(self.app_icon, self.app_id, symlink=root_icon)
        shutil.copy(root_icon, self.app_dir / '.DirIcon')
        self.write_desktop()
        self.write_launcher(self.app_exec)
        print(f'{self.app_dir} -> {self.dist_file}')
        subprocess.call(f'ARCH={build_arch} ./appimagetool -n "{self.app_dir}" "{self.dist_file}"', shell=True)


def find_libs(*args: str) -> typing.Sequence[typing.Tuple[str, str]]:
    """Try to find system libraries to be included."""
    if not args:
        return []

    arch = build_arch.replace('_', '-')
    libc = 'libc6'  # we currently don't support musl

    def parse(line):
        lib, path = line.strip().split(' => ')
        lib, typ = lib.split(' ', 1)
        for test_arch in ('x86-64', 'i386', 'aarch64'):
            if test_arch in typ:
                lib_arch = test_arch
                break
        else:
            lib_arch = ''
        for test_libc in ('libc6',):
            if test_libc in typ:
                lib_libc = test_libc
                break
        else:
            lib_libc = ''
        return (lib, lib_arch, lib_libc), path

    if not hasattr(find_libs, "cache"):
        ldconfig = shutil.which("ldconfig")
        assert ldconfig, "Make sure ldconfig is in PATH"
        data = subprocess.run([ldconfig, "-p"], capture_output=True, text=True).stdout.split("\n")[1:]
        find_libs.cache = {  # type: ignore [attr-defined]
            k: v for k, v in (parse(line) for line in data if "=>" in line)
        }

    def find_lib(lib, arch, libc):
        for k, v in find_libs.cache.items():
            if k == (lib, arch, libc):
                return v
        for k, v in find_libs.cache.items():
            if k[0].startswith(lib) and k[1] == arch and k[2] == libc:
                return v
        return None

    res = []
    for arg in args:
        # try exact match, empty libc, empty arch, empty arch and libc
        file = find_lib(arg, arch, libc)
        file = file or find_lib(arg, arch, '')
        file = file or find_lib(arg, '', libc)
        file = file or find_lib(arg, '', '')
        # resolve symlinks
        for n in range(0, 5):
            res.append((file, os.path.join('lib', os.path.basename(file))))
            if not os.path.islink(file):
                break
            dirname = os.path.dirname(file)
            file = os.readlink(file)
            if not os.path.isabs(file):
                file = os.path.join(dirname, file)
    return res


cx_Freeze.setup(
    name="Archipelago",
    version=f"{version_tuple.major}.{version_tuple.minor}.{version_tuple.build}",
    description="Archipelago",
    executables=exes,
    ext_modules=cythonize("_speedups.pyx"),
    options={
        "build_exe": {
            "packages": ["worlds", "kivy", "cymem"],
            "includes": [],
            "excludes": ["numpy", "Cython", "PySide2", "PIL",
                         "pandas"],
            "zip_include_packages": ["*"],
            "zip_exclude_packages": ["worlds", "sc2"],
            "include_files": [],  # broken in cx 6.14.0, we use more special sauce now
            "include_msvcr": False,
            "replace_paths": ["*."],
            "optimize": 1,
            "build_exe": buildfolder,
            "extra_data": extra_data,
            "extra_libs": extra_libs,
            "bin_includes": ["libffi.so", "libcrypt.so"] if is_linux else []
        },
        "bdist_appimage": {
            "build_folder": buildfolder,
        },
    },
    # override commands to get custom stuff in
    cmdclass={
        "build": BuildCommand,
        "build_exe": BuildExeCommand,
        "bdist_appimage": AppImageCommand,
    },
)