Merge remote-tracking branch 'origin/dev' into dev
.github/pyright-config.json (vendored, 2 lines changed)
@@ -29,7 +29,7 @@
"reportMissingImports": true,
"reportMissingTypeStubs": true,

"pythonVersion": "3.10",
"pythonVersion": "3.11",
"pythonPlatform": "Windows",

"executionEnvironments": [
.github/workflows/analyze-modified-files.yml (vendored, 2 lines changed)
@@ -53,7 +53,7 @@ jobs:
- uses: actions/setup-python@v5
if: env.diff != ''
with:
python-version: '3.10'
python-version: '3.11'

- name: "Install dependencies"
if: env.diff != ''
.github/workflows/build.yml (vendored, 15 lines changed)
@@ -9,22 +9,25 @@ on:
- 'setup.py'
- 'requirements.txt'
- '*.iss'
- 'worlds/*/archipelago.json'
pull_request:
paths:
- '.github/workflows/build.yml'
- 'setup.py'
- 'requirements.txt'
- '*.iss'
- 'worlds/*/archipelago.json'
workflow_dispatch:

env:
ENEMIZER_VERSION: 7.1
# NOTE: since appimage/appimagetool and appimage/type2-runtime does not have tags anymore,
# we check the sha256 and require manual intervention if it was updated.
APPIMAGETOOL_VERSION: continuous
APPIMAGETOOL_X86_64_HASH: '363dafac070b65cc36ca024b74db1f043c6f5cd7be8fca760e190dce0d18d684'
APPIMAGE_RUNTIME_VERSION: continuous
APPIMAGE_RUNTIME_X86_64_HASH: 'e3c4dfb70eddf42e7e5a1d28dff396d30563aa9a901970aebe6f01f3fecf9f8e'
APPIMAGE_FORK: 'PopTracker'
APPIMAGETOOL_VERSION: 'r-2025-10-19'
APPIMAGETOOL_X86_64_HASH: '9493a6b253a01f84acb9c624c38810ecfa11d99daa829b952b0bff43113080f9'
APPIMAGE_RUNTIME_VERSION: 'r-2025-08-11'
APPIMAGE_RUNTIME_X86_64_HASH: 'e70ffa9b69b211574d0917adc482dd66f25a0083427b5945783965d55b0b0a8b'

permissions: # permissions required for attestation
id-token: 'write'

@@ -139,9 +142,9 @@ jobs:
- name: Install build-time dependencies
run: |
echo "PYTHON=python3.12" >> $GITHUB_ENV
wget -nv https://github.com/AppImage/appimagetool/releases/download/$APPIMAGETOOL_VERSION/appimagetool-x86_64.AppImage
wget -nv https://github.com/$APPIMAGE_FORK/appimagetool/releases/download/$APPIMAGETOOL_VERSION/appimagetool-x86_64.AppImage
echo "$APPIMAGETOOL_X86_64_HASH appimagetool-x86_64.AppImage" | sha256sum -c
wget -nv https://github.com/AppImage/type2-runtime/releases/download/$APPIMAGE_RUNTIME_VERSION/runtime-x86_64
wget -nv https://github.com/$APPIMAGE_FORK/type2-runtime/releases/download/$APPIMAGE_RUNTIME_VERSION/runtime-x86_64
echo "$APPIMAGE_RUNTIME_X86_64_HASH runtime-x86_64" | sha256sum -c
chmod a+rx appimagetool-x86_64.AppImage
./appimagetool-x86_64.AppImage --appimage-extract
.github/workflows/docker.yml (vendored, new file, 154 lines)
@@ -0,0 +1,154 @@
name: Build and Publish Docker Images

on:
push:
paths:
- "**"
- "!docs/**"
- "!deploy/**"
- "!setup.py"
- "!.gitignore"
- "!.github/workflows/**"
- ".github/workflows/docker.yml"
branches:
- "*"
tags:
- "v?[0-9]+.[0-9]+.[0-9]*"
workflow_dispatch:

env:
REGISTRY: ghcr.io

jobs:
prepare:
runs-on: ubuntu-latest
outputs:
image-name: ${{ steps.image.outputs.name }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
package-name: ${{ steps.package.outputs.name }}
steps:
- name: Checkout repository
uses: actions/checkout@v4

- name: Set lowercase image name
id: image
run: |
echo "name=${GITHUB_REPOSITORY,,}" >> $GITHUB_OUTPUT

- name: Set package name
id: package
run: |
echo "name=$(basename ${GITHUB_REPOSITORY,,})" >> $GITHUB_OUTPUT

- name: Extract metadata
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY }}/${{ steps.image.outputs.name }}
tags: |
type=ref,event=branch,enable={{is_not_default_branch}}
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=raw,value=nightly,enable={{is_default_branch}}

- name: Compute final tags
id: final-tags
run: |
readarray -t tags <<< "${{ steps.meta.outputs.tags }}"

if [[ "${{ github.ref_type }}" == "tag" ]]; then
tag="${{ github.ref_name }}"
if [[ "$tag" =~ ^v?[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
full_latest="${{ env.REGISTRY }}/${{ steps.image.outputs.name }}:latest"
# Check if latest is already in tags to avoid duplicates
if ! printf '%s\n' "${tags[@]}" | grep -q "^$full_latest$"; then
tags+=("$full_latest")
fi
fi
fi

# Set multiline output
echo "tags<<EOF" >> $GITHUB_OUTPUT
printf '%s\n' "${tags[@]}" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT

build:
needs: prepare
runs-on: ${{ matrix.runner }}
permissions:
contents: read
packages: write
strategy:
matrix:
include:
- platform: amd64
runner: ubuntu-latest
suffix: amd64
cache-scope: amd64
- platform: arm64
runner: ubuntu-24.04-arm
suffix: arm64
cache-scope: arm64
steps:
- name: Checkout repository
uses: actions/checkout@v4

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3

- name: Log in to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}

- name: Compute suffixed tags
id: tags
run: |
readarray -t tags <<< "${{ needs.prepare.outputs.tags }}"
suffixed=()
for t in "${tags[@]}"; do
suffixed+=("$t-${{ matrix.suffix }}")
done
echo "tags=$(IFS=','; echo "${suffixed[*]}")" >> $GITHUB_OUTPUT

- name: Build and push Docker image
uses: docker/build-push-action@v5
with:
context: .
file: ./Dockerfile
platforms: linux/${{ matrix.platform }}
push: true
tags: ${{ steps.tags.outputs.tags }}
labels: ${{ needs.prepare.outputs.labels }}
cache-from: type=gha,scope=${{ matrix.cache-scope }}
cache-to: type=gha,mode=max,scope=${{ matrix.cache-scope }}
provenance: false

manifest:
needs: [prepare, build]
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
steps:
- name: Log in to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}

- name: Create and push multi-arch manifest
run: |
readarray -t tag_array <<< "${{ needs.prepare.outputs.tags }}"

for tag in "${tag_array[@]}"; do
docker manifest create "$tag" \
"$tag-amd64" \
"$tag-arm64"

docker manifest push "$tag"
done
.github/workflows/label-pull-requests.yml (vendored, 1 line changed)
@@ -12,7 +12,6 @@ env:
jobs:
labeler:
name: 'Apply content-based labels'
if: github.event.action == 'opened' || github.event.action == 'reopened' || github.event.action == 'synchronize'
runs-on: ubuntu-latest
steps:
- uses: actions/labeler@v5
.github/workflows/release.yml (vendored, 15 lines changed)
@@ -5,16 +5,17 @@ name: Release
on:
push:
tags:
- '*.*.*'
- 'v?[0-9]+.[0-9]+.[0-9]*'

env:
ENEMIZER_VERSION: 7.1
# NOTE: since appimage/appimagetool and appimage/type2-runtime does not have tags anymore,
# we check the sha256 and require manual intervention if it was updated.
APPIMAGETOOL_VERSION: continuous
APPIMAGETOOL_X86_64_HASH: '363dafac070b65cc36ca024b74db1f043c6f5cd7be8fca760e190dce0d18d684'
APPIMAGE_RUNTIME_VERSION: continuous
APPIMAGE_RUNTIME_X86_64_HASH: 'e3c4dfb70eddf42e7e5a1d28dff396d30563aa9a901970aebe6f01f3fecf9f8e'
APPIMAGE_FORK: 'PopTracker'
APPIMAGETOOL_VERSION: 'r-2025-10-19'
APPIMAGETOOL_X86_64_HASH: '9493a6b253a01f84acb9c624c38810ecfa11d99daa829b952b0bff43113080f9'
APPIMAGE_RUNTIME_VERSION: 'r-2025-08-11'
APPIMAGE_RUNTIME_X86_64_HASH: 'e70ffa9b69b211574d0917adc482dd66f25a0083427b5945783965d55b0b0a8b'

permissions: # permissions required for attestation
id-token: 'write'

@@ -127,9 +128,9 @@ jobs:
- name: Install build-time dependencies
run: |
echo "PYTHON=python3.12" >> $GITHUB_ENV
wget -nv https://github.com/AppImage/appimagetool/releases/download/$APPIMAGETOOL_VERSION/appimagetool-x86_64.AppImage
wget -nv https://github.com/$APPIMAGE_FORK/appimagetool/releases/download/$APPIMAGETOOL_VERSION/appimagetool-x86_64.AppImage
echo "$APPIMAGETOOL_X86_64_HASH appimagetool-x86_64.AppImage" | sha256sum -c
wget -nv https://github.com/AppImage/type2-runtime/releases/download/$APPIMAGE_RUNTIME_VERSION/runtime-x86_64
wget -nv https://github.com/$APPIMAGE_FORK/type2-runtime/releases/download/$APPIMAGE_RUNTIME_VERSION/runtime-x86_64
echo "$APPIMAGE_RUNTIME_X86_64_HASH runtime-x86_64" | sha256sum -c
chmod a+rx appimagetool-x86_64.AppImage
./appimagetool-x86_64.AppImage --appimage-extract
.github/workflows/unittests.yml (vendored, 12 lines changed)
@@ -39,15 +39,15 @@ jobs:
matrix:
os: [ubuntu-latest]
python:
- {version: '3.10'}
- {version: '3.11'}
- {version: '3.11.2'} # Change to '3.11' around 2026-06-10
- {version: '3.12'}
- {version: '3.13'}
include:
- python: {version: '3.10'} # old compat
- python: {version: '3.11'} # old compat
os: windows-latest
- python: {version: '3.12'} # current
- python: {version: '3.13'} # current
os: windows-latest
- python: {version: '3.12'} # current
- python: {version: '3.13'} # current
os: macos-latest

steps:

@@ -75,7 +75,7 @@ jobs:
os:
- ubuntu-latest
python:
- {version: '3.12'} # current
- {version: '3.13'} # current

steps:
- uses: actions/checkout@v4
.run/Build APWorld.run.xml (new file, 24 lines)
@@ -0,0 +1,24 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="Build APWorld" type="PythonConfigurationType" factoryName="Python">
<module name="Archipelago" />
<option name="ENV_FILES" value="" />
<option name="INTERPRETER_OPTIONS" value="" />
<option name="PARENT_ENVS" value="true" />
<envs>
<env name="PYTHONUNBUFFERED" value="1" />
</envs>
<option name="SDK_HOME" value="" />
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$/" />
<option name="IS_MODULE_SDK" value="true" />
<option name="ADD_CONTENT_ROOTS" value="true" />
<option name="ADD_SOURCE_ROOTS" value="true" />
<option name="SCRIPT_NAME" value="$ContentRoot$/Launcher.py" />
<option name="PARAMETERS" value="\"Build APWorlds\"" />
<option name="SHOW_COMMAND_LINE" value="false" />
<option name="EMULATE_TERMINAL" value="false" />
<option name="MODULE_MODE" value="false" />
<option name="REDIRECT_INPUT" value="false" />
<option name="INPUT_FILE" value="" />
<method v="2" />
</configuration>
</component>
@@ -261,6 +261,7 @@ class MultiWorld():
"local_items": set(item_link.get("local_items", [])),
"non_local_items": set(item_link.get("non_local_items", [])),
"link_replacement": replacement_prio.index(item_link["link_replacement"]),
"skip_if_solo": item_link.get("skip_if_solo", False),
}

for _name, item_link in item_links.items():

@@ -284,6 +285,8 @@ class MultiWorld():
for group_name, item_link in item_links.items():
game = item_link["game"]
if item_link["skip_if_solo"] and len(item_link["players"]) == 1:
continue
group_id, group = self.add_group(group_name, game, set(item_link["players"]))

group["item_pool"] = item_link["item_pool"]

@@ -1343,8 +1346,7 @@ class Region:
for entrance in self.entrances: # BFS might be better here, trying DFS for now.
return entrance.parent_region.get_connecting_entrance(is_main_entrance)

def add_locations(self, locations: Dict[str, Optional[int]],
location_type: Optional[type[Location]] = None) -> None:
def add_locations(self, locations: Mapping[str, int | None], location_type: type[Location] | None = None) -> None:
"""
Adds locations to the Region object, where location_type is your Location class and locations is a dict of
location names to address.

@@ -1432,8 +1434,8 @@ class Region:
entrance.connect(self)
return entrance

def add_exits(self, exits: Union[Iterable[str], Dict[str, Optional[str]]],
rules: Dict[str, Callable[[CollectionState], bool]] = None) -> List[Entrance]:
def add_exits(self, exits: Iterable[str] | Mapping[str, str | None],
rules: Mapping[str, Callable[[CollectionState], bool]] | None = None) -> List[Entrance]:
"""
Connects current region to regions in exit dictionary. Passed region names must exist first.

@@ -1441,7 +1443,7 @@ class Region:
created entrances will be named "self.name -> connecting_region"
:param rules: rules for the exits from this region. format is {"connecting_region": rule}
"""
if not isinstance(exits, Dict):
if not isinstance(exits, Mapping):
exits = dict.fromkeys(exits)
return [
self.connect(
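The two Region hunks above modernize add_locations and add_exits to accept any Mapping and to use PEP 604 union syntax. A minimal, hypothetical usage sketch based only on the docstrings quoted here; the region names, location names, addresses and the MyGameLocation class are invented, and player/multiworld are assumed to come from the calling World:

```python
from BaseClasses import Region

# Hypothetical fragment of a world's create_regions(); names and addresses are
# placeholders, not taken from any real world.
menu = Region("Menu", player, multiworld)
field = Region("Field", player, multiworld)

# add_locations takes a Mapping of location name -> address (None for event locations).
field.add_locations({"Field Chest": 0x1001, "Field Event": None}, MyGameLocation)

# add_exits takes an iterable of region names or a mapping of region name -> entrance
# name, plus optional per-exit access rules keyed by the connecting region's name.
menu.add_exits({"Field": "Menu -> Field"},
               rules={"Field": lambda state: state.has("Key", player)})
```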
@@ -1855,6 +1857,9 @@ class Spoiler:
Utils.__version__, self.multiworld.seed))
outfile.write('Filling Algorithm: %s\n' % self.multiworld.algorithm)
outfile.write('Players: %d\n' % self.multiworld.players)
if self.multiworld.players > 1:
loc_count = len([loc for loc in self.multiworld.get_locations() if not loc.is_event])
outfile.write('Total Location Count: %d\n' % loc_count)
outfile.write(f'Plando Options: {self.multiworld.plando_options}\n')
AutoWorld.call_stage(self.multiworld, "write_spoiler_header", outfile)

@@ -1863,6 +1868,9 @@ class Spoiler:
outfile.write('\nPlayer %d: %s\n' % (player, self.multiworld.get_player_name(player)))
outfile.write('Game: %s\n' % self.multiworld.game[player])

loc_count = len([loc for loc in self.multiworld.get_locations(player) if not loc.is_event])
outfile.write('Location Count: %d\n' % loc_count)

for f_option, option in self.multiworld.worlds[player].options_dataclass.type_hints.items():
write_option(f_option, option)
@@ -99,17 +99,6 @@ class ClientCommandProcessor(CommandProcessor):
self.ctx.on_print_json({"data": parts, "cmd": "PrintJSON"})
return True

def get_current_datapackage(self) -> dict[str, typing.Any]:
"""
Return datapackage for current game if known.

:return: The datapackage for the currently registered game. If not found, an empty dictionary will be returned.
"""
if not self.ctx.game:
return {}
checksum = self.ctx.checksums[self.ctx.game]
return Utils.load_data_package_for_checksum(self.ctx.game, checksum)

def _cmd_missing(self, filter_text = "") -> bool:
"""List all missing location checks, from your local game state.
Can be given text, which will be used as filter."""

@@ -119,8 +108,8 @@ class ClientCommandProcessor(CommandProcessor):
count = 0
checked_count = 0

lookup = self.get_current_datapackage().get("location_name_to_id", {})
for location, location_id in lookup.items():
lookup = self.ctx.location_names[self.ctx.game]
for location_id, location in lookup.items():
if filter_text and filter_text not in location:
continue
if location_id < 0:

@@ -141,11 +130,10 @@ class ClientCommandProcessor(CommandProcessor):
self.output("No missing location checks found.")
return True

def output_datapackage_part(self, key: str, name: str) -> bool:
def output_datapackage_part(self, name: typing.Literal["Item Names", "Location Names"]) -> bool:
"""
Helper to digest a specific section of this game's datapackage.

:param key: The dictionary key in the datapackage.
:param name: Printed to the user as context for the part.

:return: Whether the process was successful.

@@ -154,23 +142,20 @@ class ClientCommandProcessor(CommandProcessor):
self.output(f"No game set, cannot determine {name}.")
return False

lookup = self.get_current_datapackage().get(key)
if lookup is None:
self.output("datapackage not yet loaded, try again")
return False

lookup = self.ctx.item_names if name == "Item Names" else self.ctx.location_names
lookup = lookup[self.ctx.game]
self.output(f"{name} for {self.ctx.game}")
for key in lookup:
self.output(key)
for name in lookup.values():
self.output(name)
return True

def _cmd_items(self) -> bool:
"""List all item names for the currently running game."""
return self.output_datapackage_part("item_name_to_id", "Item Names")
return self.output_datapackage_part("Item Names")

def _cmd_locations(self) -> bool:
"""List all location names for the currently running game."""
return self.output_datapackage_part("location_name_to_id", "Location Names")
return self.output_datapackage_part("Location Names")

def output_group_part(self, group_key: typing.Literal["item_name_groups", "location_name_groups"],
filter_key: str,

@@ -871,9 +856,9 @@ async def server_loop(ctx: CommonContext, address: typing.Optional[str] = None)
server_url = urllib.parse.urlparse(address)
if server_url.username:
ctx.username = server_url.username
ctx.username = urllib.parse.unquote(server_url.username)
if server_url.password:
ctx.password = server_url.password
ctx.password = urllib.parse.unquote(server_url.password)

def reconnect_hint() -> str:
return ", type /connect to reconnect" if ctx.server_address else ""
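The server_loop change above runs the username and password parsed from the connect address through urllib.parse.unquote, so percent-encoded characters reach the server decoded. A small, self-contained illustration (the address is made up):

```python
import urllib.parse

# Hypothetical connect address with a percent-encoded space in the slot name.
address = "archipelago://Player%201:top%20secret@localhost:38281"
url = urllib.parse.urlparse(address)

print(url.username)                        # "Player%201" - urlparse leaves it encoded
print(urllib.parse.unquote(url.username))  # "Player 1"
print(urllib.parse.unquote(url.password))  # "top secret"
```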
@@ -28,7 +28,7 @@ COPY requirements.txt WebHostLib/requirements.txt

RUN pip install --no-cache-dir -r \
WebHostLib/requirements.txt \
"setuptools<81"
"setuptools>=75,<81"

COPY _speedups.pyx .
COPY intset.h .

@@ -36,7 +36,7 @@ COPY intset.h .
RUN cythonize -b -i _speedups.pyx

# Archipelago
FROM python:3.12-slim AS archipelago
FROM python:3.12-slim-bookworm AS archipelago
ARG TARGETARCH
ENV VIRTUAL_ENV=/opt/venv
ENV PYTHONUNBUFFERED=1
Fill.py (8 lines changed)
@@ -129,6 +129,10 @@ def fill_restrictive(multiworld: MultiWorld, base_state: CollectionState, locati
for i, location in enumerate(placements))
for (i, location, unsafe) in swap_attempts:
placed_item = location.item
if item_to_place == placed_item:
# The number of allowed swaps is limited, so do not allow a swap of an item with a copy of
# itself.
continue
# Unplaceable items can sometimes be swapped infinitely. Limit the
# number of times we will swap an individual item to prevent this
swap_count = swapped_items[placed_item.player, placed_item.name, unsafe]

@@ -549,10 +553,12 @@ def distribute_items_restrictive(multiworld: MultiWorld,
if prioritylocations and regular_progression:
# retry with one_item_per_player off because some priority fills can fail to fill with that optimization
# deprioritized items are still not in the mix, so they need to be collected into state first.
# allow_partial should only be set if there is deprioritized progression to fall back on.
priority_retry_state = sweep_from_pool(multiworld.state, deprioritized_progression)
fill_restrictive(multiworld, priority_retry_state, prioritylocations, regular_progression,
single_player_placement=single_player, swap=False, on_place=mark_for_locking,
name="Priority Retry", one_item_per_player=False, allow_partial=True)
name="Priority Retry", one_item_per_player=False,
allow_partial=bool(deprioritized_progression))

if prioritylocations and deprioritized_progression:
# There are no more regular progression items that can be placed on any priority locations.
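The fill_restrictive hunk above skips swapping an item with a copy of itself, because the number of swaps per item is capped. A rough, stand-alone sketch of that bookkeeping; the cap value and the may_swap helper are invented for illustration and are not how Fill.py is actually structured:

```python
from collections import Counter

swapped_items: Counter = Counter()  # keyed by (player, item name, unsafe)
SWAP_LIMIT = 1  # hypothetical cap, not the real value used by Fill.py

def may_swap(item_to_place, placed_item, unsafe: bool) -> bool:
    if item_to_place == placed_item:
        # A swap with an identical copy would only burn one of the limited attempts.
        return False
    key = (placed_item.player, placed_item.name, unsafe)
    if swapped_items[key] >= SWAP_LIMIT:
        return False  # this item has already been swapped too often
    swapped_items[key] += 1
    return True
```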
Generate.py (58 lines changed)
@@ -23,7 +23,7 @@ from BaseClasses import seeddigits, get_seed, PlandoOptions
from Utils import parse_yamls, version_tuple, __version__, tuplize_version

def mystery_argparse():
def mystery_argparse(argv: list[str] | None = None):
from settings import get_settings
settings = get_settings()
defaults = settings.generator

@@ -57,7 +57,7 @@ def mystery_argparse():
parser.add_argument("--spoiler_only", action="store_true",
help="Skips generation assertion and multidata, outputting only a spoiler log. "
"Intended for debugging and testing purposes.")
args = parser.parse_args()
args = parser.parse_args(argv)

if args.skip_output and args.spoiler_only:
parser.error("Cannot mix --skip_output and --spoiler_only")

@@ -166,19 +166,10 @@ def main(args=None) -> tuple[argparse.Namespace, int]:
f"A mix is also permitted.")

from worlds.AutoWorld import AutoWorldRegister
from worlds.alttp.EntranceRandomizer import parse_arguments
erargs = parse_arguments(['--multi', str(args.multi)])
erargs.seed = seed
erargs.plando_options = args.plando
erargs.spoiler = args.spoiler
erargs.race = args.race
erargs.outputname = seed_name
erargs.outputpath = args.outputpath
erargs.skip_prog_balancing = args.skip_prog_balancing
erargs.skip_output = args.skip_output
erargs.spoiler_only = args.spoiler_only
erargs.name = {}
erargs.csv_output = args.csv_output
args.outputname = seed_name
args.sprite = dict.fromkeys(range(1, args.multi+1), None)
args.sprite_pool = dict.fromkeys(range(1, args.multi+1), None)
args.name = {}

settings_cache: dict[str, tuple[argparse.Namespace, ...]] = \
{fname: (tuple(roll_settings(yaml, args.plando) for yaml in yamls) if args.sameoptions else None)

@@ -205,7 +196,7 @@ def main(args=None) -> tuple[argparse.Namespace, int]:
for player in range(1, args.multi + 1):
player_path_cache[player] = player_files.get(player, args.weights_file_path)
name_counter = Counter()
erargs.player_options = {}
args.player_options = {}

player = 1
while player <= args.multi:

@@ -218,21 +209,21 @@ def main(args=None) -> tuple[argparse.Namespace, int]:
for k, v in vars(settingsObject).items():
if v is not None:
try:
getattr(erargs, k)[player] = v
getattr(args, k)[player] = v
except AttributeError:
setattr(erargs, k, {player: v})
setattr(args, k, {player: v})
except Exception as e:
raise Exception(f"Error setting {k} to {v} for player {player}") from e

# name was not specified
if player not in erargs.name:
if player not in args.name:
if path == args.weights_file_path:
# weights file, so we need to make the name unique
erargs.name[player] = f"Player{player}"
args.name[player] = f"Player{player}"
else:
# use the filename
erargs.name[player] = os.path.splitext(os.path.split(path)[-1])[0]
erargs.name[player] = handle_name(erargs.name[player], player, name_counter)
args.name[player] = os.path.splitext(os.path.split(path)[-1])[0]
args.name[player] = handle_name(args.name[player], player, name_counter)

player += 1
except Exception as e:

@@ -240,10 +231,10 @@ def main(args=None) -> tuple[argparse.Namespace, int]:
else:
raise RuntimeError(f'No weights specified for player {player}')

if len(set(name.lower() for name in erargs.name.values())) != len(erargs.name):
raise Exception(f"Names have to be unique. Names: {Counter(name.lower() for name in erargs.name.values())}")
if len(set(name.lower() for name in args.name.values())) != len(args.name):
raise Exception(f"Names have to be unique. Names: {Counter(name.lower() for name in args.name.values())}")

return erargs, seed
return args, seed

def read_weights_yamls(path) -> tuple[Any, ...]:

@@ -495,7 +486,22 @@ def roll_settings(weights: dict, plando_options: PlandoOptions = PlandoOptions.b
if required_plando_options:
raise Exception(f"Settings reports required plando module {str(required_plando_options)}, "
f"which is not enabled.")

games = requirements.get("game", {})
for game, version in games.items():
if game not in AutoWorldRegister.world_types:
continue
if not version:
raise Exception(f"Invalid version for game {game}: {version}.")
if isinstance(version, str):
version = {"min": version}
if "min" in version and tuplize_version(version["min"]) > AutoWorldRegister.world_types[game].world_version:
raise Exception(f"Settings reports required version of world \"{game}\" is at least {version['min']}, "
f"however world is of version "
f"{AutoWorldRegister.world_types[game].world_version.as_simple_string()}.")
if "max" in version and tuplize_version(version["max"]) < AutoWorldRegister.world_types[game].world_version:
raise Exception(f"Settings reports required version of world \"{game}\" is no later than {version['max']}, "
f"however world is of version "
f"{AutoWorldRegister.world_types[game].world_version.as_simple_string()}.")
ret = argparse.Namespace()
for option_key in Options.PerGameCommonOptions.type_hints:
if option_key in weights and option_key not in Options.CommonOptions.type_hints:
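The new block in roll_settings validates a per-game version requirement from the yaml against the loaded world's world_version via tuplize_version. A stand-alone sketch of just that comparison; the game version numbers are placeholders, and tuplize_version is simplified here to return a plain tuple rather than the real Version named tuple:

```python
def tuplize_version(version: str) -> tuple[int, ...]:
    return tuple(int(piece) for piece in version.split("."))

world_version = tuplize_version("1.3.2")        # pretend this is the installed world
requirement = {"min": "1.2.0", "max": "1.9.9"}  # e.g. parsed from the yaml's requirements

if "min" in requirement and tuplize_version(requirement["min"]) > world_version:
    raise Exception("yaml requires a newer world than is installed")
if "max" in requirement and tuplize_version(requirement["max"]) < world_version:
    raise Exception("yaml requires an older world than is installed")
```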
@@ -1,9 +0,0 @@
if __name__ == '__main__':
import ModuleUpdate
ModuleUpdate.update()

import Utils
Utils.init_logging("KH1Client", exception_logger="Client")

from worlds.kh1.Client import launch
launch()

@@ -1,8 +0,0 @@
import ModuleUpdate
import Utils
from worlds.kh2.Client import launch
ModuleUpdate.update()

if __name__ == '__main__':
Utils.init_logging("KH2Client", exception_logger="Client")
launch()
@@ -484,7 +484,7 @@ def main(args: argparse.Namespace | dict | None = None):

if __name__ == '__main__':
init_logging('Launcher')
Utils.freeze_support()
multiprocessing.freeze_support()
multiprocessing.set_start_method("spawn")  # if launched process uses kivy, fork won't work
parser = argparse.ArgumentParser(
description='Archipelago Launcher',
Main.py (13 lines changed)
@@ -37,7 +37,7 @@ def main(args, seed=None, baked_server_options: dict[str, object] | None = None)

logger = logging.getLogger()
multiworld.set_seed(seed, args.race, str(args.outputname) if args.outputname else None)
multiworld.plando_options = args.plando_options
multiworld.plando_options = args.plando
multiworld.game = args.game.copy()
multiworld.player_name = args.name.copy()
multiworld.sprite = args.sprite.copy()

@@ -54,12 +54,17 @@ def main(args, seed=None, baked_server_options: dict[str, object] | None = None)
logger.info(f"Found {len(AutoWorld.AutoWorldRegister.world_types)} World Types:")
longest_name = max(len(text) for text in AutoWorld.AutoWorldRegister.world_types)

item_count = len(str(max(len(cls.item_names) for cls in AutoWorld.AutoWorldRegister.world_types.values())))
location_count = len(str(max(len(cls.location_names) for cls in AutoWorld.AutoWorldRegister.world_types.values())))
world_classes = AutoWorld.AutoWorldRegister.world_types.values()

version_count = max(len(cls.world_version.as_simple_string()) for cls in world_classes)
item_count = len(str(max(len(cls.item_names) for cls in world_classes)))
location_count = len(str(max(len(cls.location_names) for cls in world_classes)))

for name, cls in AutoWorld.AutoWorldRegister.world_types.items():
if not cls.hidden and len(cls.item_names) > 0:
logger.info(f"  {name:{longest_name}}: Items: {len(cls.item_names):{item_count}} | "
logger.info(f"  {name:{longest_name}}: "
f"v{cls.world_version.as_simple_string():{version_count}} | "
f"Items: {len(cls.item_names):{item_count}} | "
f"Locations: {len(cls.location_names):{location_count}}")

del item_count, location_count
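The reworked logging block pads each column to the widest value (version_count, item_count, location_count) so the world table lines up. A tiny illustration of the nested format specs it relies on; the sample values are invented:

```python
# The inner {width} is substituted first and then used as the field width of the
# outer value, which is how the table columns stay aligned.
name, longest_name = "Clique", 20
version, version_count = "1.2", 6

print(f"  {name:{longest_name}}: v{version:{version_count}} | Items: 4")
# name is padded to 20 characters and the version string to 6.
```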
@@ -5,15 +5,15 @@ import multiprocessing
import warnings

if sys.platform in ("win32", "darwin") and sys.version_info < (3, 10, 11):
if sys.platform in ("win32", "darwin") and sys.version_info < (3, 11, 9):
# Official micro version updates. This should match the number in docs/running from source.md.
raise RuntimeError(f"Incompatible Python Version found: {sys.version_info}. Official 3.10.15+ is supported.")
elif sys.platform in ("win32", "darwin") and sys.version_info < (3, 10, 15):
raise RuntimeError(f"Incompatible Python Version found: {sys.version_info}. Official 3.11.9+ is supported.")
elif sys.platform in ("win32", "darwin") and sys.version_info < (3, 11, 13):
# There are known security issues, but no easy way to install fixed versions on Windows for testing.
warnings.warn(f"Python Version {sys.version_info} has security issues. Don't use in production.")
elif sys.version_info < (3, 10, 1):
elif sys.version_info < (3, 11, 0):
# Other platforms may get security backports instead of micro updates, so the number is unreliable.
raise RuntimeError(f"Incompatible Python Version found: {sys.version_info}. 3.10.1+ is supported.")
raise RuntimeError(f"Incompatible Python Version found: {sys.version_info}. 3.11.0+ is supported.")

# don't run update if environment is frozen/compiled or if not the parent process (skip in subprocess)
_skip_update = bool(

@@ -74,11 +74,11 @@ def update_command():
def install_pkg_resources(yes=False):
try:
import pkg_resources  # noqa: F401
except ImportError:
except (AttributeError, ImportError):
check_pip()
if not yes:
confirm("pkg_resources not found, press enter to install it")
subprocess.call([sys.executable, "-m", "pip", "install", "--upgrade", "setuptools<81"])
subprocess.call([sys.executable, "-m", "pip", "install", "--upgrade", "setuptools>=75,<81"])

def update(yes: bool = False, force: bool = False) -> None:
MultiServer.py (221 lines changed)
@@ -32,7 +32,7 @@ if typing.TYPE_CHECKING:

import colorama
import websockets
from websockets.extensions.permessage_deflate import PerMessageDeflate
from websockets.extensions.permessage_deflate import PerMessageDeflate, ServerPerMessageDeflateFactory
try:
# ponyorm is a requirement for webhost, not default server, so may not be importable
from pony.orm.dbapiprovider import OperationalError

@@ -50,6 +50,15 @@ from BaseClasses import ItemClassification
min_client_version = Version(0, 5, 0)
colorama.just_fix_windows_console()

no_version = Version(0, 0, 0)
assert isinstance(no_version, tuple)  # assert immutable

server_per_message_deflate_factory = ServerPerMessageDeflateFactory(
server_max_window_bits=11,
client_max_window_bits=11,
compress_settings={"memLevel": 4},
)

def remove_from_list(container, value):
try:

@@ -125,8 +134,31 @@ def get_saving_second(seed_name: str, interval: int = 60) -> int:

class Client(Endpoint):
version = Version(0, 0, 0)
tags: typing.List[str]
__slots__ = (
"__weakref__",
"version",
"auth",
"team",
"slot",
"send_index",
"tags",
"messageprocessor",
"ctx",
"remote_items",
"remote_start_inventory",
"no_items",
"no_locations",
"no_text",
)

version: Version
auth: bool
team: int | None
slot: int | None
send_index: int
tags: list[str]
messageprocessor: ClientMessageProcessor
ctx: weakref.ref[Context]
remote_items: bool
remote_start_inventory: bool
no_items: bool

@@ -135,6 +167,7 @@ class Client(Endpoint):
def __init__(self, socket: "ServerConnection", ctx: Context) -> None:
super().__init__(socket)
self.version = no_version
self.auth = False
self.team = None
self.slot = None

@@ -142,6 +175,11 @@ class Client(Endpoint):
self.tags = []
self.messageprocessor = client_message_processor(ctx, self)
self.ctx = weakref.ref(ctx)
self.remote_items = False
self.remote_start_inventory = False
self.no_items = False
self.no_locations = False
self.no_text = False

@property
def items_handling(self):

@@ -179,6 +217,7 @@ class Context:
"release_mode": str,
"remaining_mode": str,
"collect_mode": str,
"countdown_mode": str,
"item_cheat": bool,
"compatibility": int}
# team -> slot id -> list of clients authenticated to slot.

@@ -208,8 +247,8 @@ class Context:
def __init__(self, host: str, port: int, server_password: str, password: str, location_check_points: int,
hint_cost: int, item_cheat: bool, release_mode: str = "disabled", collect_mode="disabled",
remaining_mode: str = "disabled", auto_shutdown: typing.SupportsFloat = 0, compatibility: int = 2,
log_network: bool = False, logger: logging.Logger = logging.getLogger()):
countdown_mode: str = "auto", remaining_mode: str = "disabled", auto_shutdown: typing.SupportsFloat = 0,
compatibility: int = 2, log_network: bool = False, logger: logging.Logger = logging.getLogger()):
self.logger = logger
super(Context, self).__init__()
self.slot_info = {}

@@ -242,6 +281,7 @@ class Context:
self.release_mode: str = release_mode
self.remaining_mode: str = remaining_mode
self.collect_mode: str = collect_mode
self.countdown_mode: str = countdown_mode
self.item_cheat = item_cheat
self.exit_event = asyncio.Event()
self.client_activity_timers: typing.Dict[

@@ -627,6 +667,7 @@ class Context:
"server_password": self.server_password, "password": self.password,
"release_mode": self.release_mode,
"remaining_mode": self.remaining_mode, "collect_mode": self.collect_mode,
"countdown_mode": self.countdown_mode,
"item_cheat": self.item_cheat, "compatibility": self.compatibility}

}

@@ -661,6 +702,7 @@ class Context:
self.release_mode = savedata["game_options"]["release_mode"]
self.remaining_mode = savedata["game_options"]["remaining_mode"]
self.collect_mode = savedata["game_options"]["collect_mode"]
self.countdown_mode = savedata["game_options"].get("countdown_mode", self.countdown_mode)
self.item_cheat = savedata["game_options"]["item_cheat"]
self.compatibility = savedata["game_options"]["compatibility"]

@@ -1135,8 +1177,13 @@ def register_location_checks(ctx: Context, team: int, slot: int, locations: typi
ctx.save()

def collect_hints(ctx: Context, team: int, slot: int, item: typing.Union[int, str], auto_status: HintStatus) \
-> typing.List[Hint]:
def collect_hints(ctx: Context, team: int, slot: int, item: typing.Union[int, str],
status: HintStatus | None = None) -> typing.List[Hint]:
"""
Collect a new hint for a given item id or name, with a given status.
If status is None (which is the default value), an automatic status will be determined from the item's quality.
"""

hints = []
slots: typing.Set[int] = {slot}
for group_id, group in ctx.groups.items():

@@ -1152,25 +1199,39 @@ def collect_hints(ctx: Context, team: int, slot: int, item: typing.Union[int, st
else:
found = location_id in ctx.location_checks[team, finding_player]
entrance = ctx.er_hint_data.get(finding_player, {}).get(location_id, "")
new_status = auto_status

hint_status = status  # Assign again because we're in a for loop
if found:
new_status = HintStatus.HINT_FOUND
elif item_flags & ItemClassification.trap:
new_status = HintStatus.HINT_AVOID
hints.append(Hint(receiving_player, finding_player, location_id, item_id, found, entrance,
item_flags, new_status))
hint_status = HintStatus.HINT_FOUND
elif hint_status is None:
if item_flags & ItemClassification.trap:
hint_status = HintStatus.HINT_AVOID
else:
hint_status = HintStatus.HINT_PRIORITY

hints.append(
Hint(receiving_player, finding_player, location_id, item_id, found, entrance, item_flags, hint_status)
)

return hints

def collect_hint_location_name(ctx: Context, team: int, slot: int, location: str, auto_status: HintStatus) \
-> typing.List[Hint]:
def collect_hint_location_name(ctx: Context, team: int, slot: int, location: str,
status: HintStatus | None = HintStatus.HINT_UNSPECIFIED) -> typing.List[Hint]:
"""
Collect a new hint for a given location name, with a given status (defaults to "unspecified").
If None is passed for the status, then an automatic status will be determined from the item's quality.
"""
seeked_location: int = ctx.location_names_for_game(ctx.games[slot])[location]
return collect_hint_location_id(ctx, team, slot, seeked_location, auto_status)
return collect_hint_location_id(ctx, team, slot, seeked_location, status)

def collect_hint_location_id(ctx: Context, team: int, slot: int, seeked_location: int, auto_status: HintStatus) \
-> typing.List[Hint]:
def collect_hint_location_id(ctx: Context, team: int, slot: int, seeked_location: int,
status: HintStatus | None = HintStatus.HINT_UNSPECIFIED) -> typing.List[Hint]:
"""
Collect a new hint for a given location id, with a given status (defaults to "unspecified").
If None is passed for the status, then an automatic status will be determined from the item's quality.
"""
prev_hint = ctx.get_hint(team, slot, seeked_location)
if prev_hint:
return [prev_hint]

@@ -1180,13 +1241,16 @@ def collect_hint_location_id(ctx: Context, team: int, slot: int, seeked_location
found = seeked_location in ctx.location_checks[team, slot]
entrance = ctx.er_hint_data.get(slot, {}).get(seeked_location, "")
new_status = auto_status

if found:
new_status = HintStatus.HINT_FOUND
elif item_flags & ItemClassification.trap:
new_status = HintStatus.HINT_AVOID
return [Hint(receiving_player, slot, seeked_location, item_id, found, entrance, item_flags,
new_status)]
status = HintStatus.HINT_FOUND
elif status is None:
if item_flags & ItemClassification.trap:
status = HintStatus.HINT_AVOID
else:
status = HintStatus.HINT_PRIORITY

return [Hint(receiving_player, slot, seeked_location, item_id, found, entrance, item_flags, status)]
return []
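Both collect_hints and collect_hint_location_id now take an optional status: when None is passed, the server derives one itself — HINT_FOUND if the location is already checked, HINT_AVOID for traps, otherwise HINT_PRIORITY. A condensed restatement of just that decision, outside of the Context/Hint machinery; the enum values here are placeholders, not the real NetUtils.HintStatus constants:

```python
import enum

class HintStatus(enum.IntEnum):  # placeholder values, for illustration only
    HINT_FOUND = enum.auto()
    HINT_UNSPECIFIED = enum.auto()
    HINT_AVOID = enum.auto()
    HINT_PRIORITY = enum.auto()

def resolve_status(status: "HintStatus | None", found: bool, is_trap: bool) -> "HintStatus":
    if found:
        return HintStatus.HINT_FOUND  # a found hint always reports as found
    if status is None:
        return HintStatus.HINT_AVOID if is_trap else HintStatus.HINT_PRIORITY
    return status  # an explicitly requested status is passed through unchanged
```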
@@ -1300,7 +1364,8 @@ class CommandProcessor(metaclass=CommandMeta):
argname += "=" + parameter.default
argtext += argname
argtext += " "
s += f"{self.marker}{command} {argtext}\n    {method.__doc__}\n"
doctext = '\n    '.join(inspect.getdoc(method).split('\n'))
s += f"{self.marker}{command} {argtext}\n    {doctext}\n"
return s

def _cmd_help(self):

@@ -1329,19 +1394,6 @@ class CommandProcessor(metaclass=CommandMeta):
class CommonCommandProcessor(CommandProcessor):
ctx: Context

def _cmd_countdown(self, seconds: str = "10") -> bool:
"""Start a countdown in seconds"""
try:
timer = int(seconds, 10)
except ValueError:
timer = 10
else:
if timer > 60 * 60:
raise ValueError(f"{timer} is invalid. Maximum is 1 hour.")

async_start(countdown(self.ctx, timer))
return True

def _cmd_options(self):
"""List all current options. Warning: lists password."""
self.output("Current options:")

@@ -1483,6 +1535,23 @@ class ClientMessageProcessor(CommonCommandProcessor):
" You can ask the server admin for a /collect")
return False

def _cmd_countdown(self, seconds: str = "10") -> bool:
"""Start a countdown in seconds"""
if self.ctx.countdown_mode == "disabled" or \
self.ctx.countdown_mode == "auto" and len(self.ctx.player_names) >= 30:
self.output("Sorry, client countdowns have been disabled on this server. You can ask the server admin for a /countdown")
return False
try:
timer = int(seconds, 10)
except ValueError:
timer = 10
else:
if timer > 60 * 60:
raise ValueError(f"{timer} is invalid. Maximum is 1 hour.")

async_start(countdown(self.ctx, timer))
return True

def _cmd_remaining(self) -> bool:
"""List remaining items in your game, but not their location or recipient"""
if self.ctx.remaining_mode == "enabled":

@@ -1610,7 +1679,6 @@ class ClientMessageProcessor(CommonCommandProcessor):
def get_hints(self, input_text: str, for_location: bool = False) -> bool:
points_available = get_client_points(self.ctx, self.client)
cost = self.ctx.get_hint_cost(self.client.slot)
auto_status = HintStatus.HINT_UNSPECIFIED if for_location else HintStatus.HINT_PRIORITY
if not input_text:
hints = {hint.re_check(self.ctx, self.client.team) for hint in
self.ctx.hints[self.client.team, self.client.slot]}

@@ -1636,9 +1704,9 @@ class ClientMessageProcessor(CommonCommandProcessor):
self.output(f"Sorry, \"{hint_name}\" is marked as non-hintable.")
hints = []
elif not for_location:
hints = collect_hints(self.ctx, self.client.team, self.client.slot, hint_id, auto_status)
hints = collect_hints(self.ctx, self.client.team, self.client.slot, hint_id)
else:
hints = collect_hint_location_id(self.ctx, self.client.team, self.client.slot, hint_id, auto_status)
hints = collect_hint_location_id(self.ctx, self.client.team, self.client.slot, hint_id)

else:
game = self.ctx.games[self.client.slot]

@@ -1658,16 +1726,18 @@ class ClientMessageProcessor(CommonCommandProcessor):
hints = []
for item_name in self.ctx.item_name_groups[game][hint_name]:
if item_name in self.ctx.item_names_for_game(game):  # ensure item has an ID
hints.extend(collect_hints(self.ctx, self.client.team, self.client.slot, item_name, auto_status))
hints.extend(collect_hints(self.ctx, self.client.team, self.client.slot, item_name))
elif not for_location and hint_name in self.ctx.item_names_for_game(game):  # item name
hints = collect_hints(self.ctx, self.client.team, self.client.slot, hint_name, auto_status)
hints = collect_hints(self.ctx, self.client.team, self.client.slot, hint_name)
elif hint_name in self.ctx.location_name_groups[game]:  # location group name
hints = []
for loc_name in self.ctx.location_name_groups[game][hint_name]:
if loc_name in self.ctx.location_names_for_game(game):
hints.extend(collect_hint_location_name(self.ctx, self.client.team, self.client.slot, loc_name, auto_status))
hints.extend(
collect_hint_location_name(self.ctx, self.client.team, self.client.slot, loc_name)
)
else:  # location name
hints = collect_hint_location_name(self.ctx, self.client.team, self.client.slot, hint_name, auto_status)
hints = collect_hint_location_name(self.ctx, self.client.team, self.client.slot, hint_name)

else:
self.output(response)

@@ -1945,8 +2015,7 @@ async def process_client_cmd(ctx: Context, client: Client, args: dict):
target_item, target_player, flags = ctx.locations[client.slot][location]
if create_as_hint:
hints.extend(collect_hint_location_id(ctx, client.team, client.slot, location,
HintStatus.HINT_UNSPECIFIED))
hints.extend(collect_hint_location_id(ctx, client.team, client.slot, location))
locs.append(NetworkItem(target_item, location, target_player, flags))
ctx.notify_hints(client.team, hints, only_new=create_as_hint == 2, persist_even_if_found=True)
if locs and create_as_hint:

@@ -1961,6 +2030,16 @@ async def process_client_cmd(ctx: Context, client: Client, args: dict):
if not locations:
await ctx.send_msgs(client, [{"cmd": "InvalidPacket", "type": "arguments",
"text": "CreateHints: No locations specified.", "original_cmd": cmd}])
return

try:
status = HintStatus(status)
except ValueError as err:
await ctx.send_msgs(client,
[{"cmd": "InvalidPacket", "type": "arguments",
"text": f"Unknown Status: {err}",
"original_cmd": cmd}])
return

hints = []

@@ -2228,6 +2307,19 @@ class ServerCommandProcessor(CommonCommandProcessor):
self.output(f"Could not find player {player_name} to collect")
return False

def _cmd_countdown(self, seconds: str = "10") -> bool:
"""Start a countdown in seconds"""
try:
timer = int(seconds, 10)
except ValueError:
timer = 10
else:
if timer > 60 * 60:
raise ValueError(f"{timer} is invalid. Maximum is 1 hour.")

async_start(countdown(self.ctx, timer))
return True

@mark_raw
def _cmd_release(self, player_name: str) -> bool:
"""Send out the remaining items from a player to their intended recipients."""

@@ -2349,9 +2441,9 @@ class ServerCommandProcessor(CommonCommandProcessor):
hints = []
for item_name_from_group in self.ctx.item_name_groups[game][item]:
if item_name_from_group in self.ctx.item_names_for_game(game):  # ensure item has an ID
hints.extend(collect_hints(self.ctx, team, slot, item_name_from_group, HintStatus.HINT_PRIORITY))
hints.extend(collect_hints(self.ctx, team, slot, item_name_from_group))
else:  # item name or id
hints = collect_hints(self.ctx, team, slot, item, HintStatus.HINT_PRIORITY)
hints = collect_hints(self.ctx, team, slot, item)

if hints:
self.ctx.notify_hints(team, hints)

@@ -2385,17 +2477,14 @@ class ServerCommandProcessor(CommonCommandProcessor):
if usable:
if isinstance(location, int):
hints = collect_hint_location_id(self.ctx, team, slot, location,
HintStatus.HINT_UNSPECIFIED)
hints = collect_hint_location_id(self.ctx, team, slot, location)
elif game in self.ctx.location_name_groups and location in self.ctx.location_name_groups[game]:
hints = []
for loc_name_from_group in self.ctx.location_name_groups[game][location]:
if loc_name_from_group in self.ctx.location_names_for_game(game):
hints.extend(collect_hint_location_name(self.ctx, team, slot, loc_name_from_group,
HintStatus.HINT_UNSPECIFIED))
hints.extend(collect_hint_location_name(self.ctx, team, slot, loc_name_from_group))
else:
hints = collect_hint_location_name(self.ctx, team, slot, location,
HintStatus.HINT_UNSPECIFIED)
hints = collect_hint_location_name(self.ctx, team, slot, location)
if hints:
self.ctx.notify_hints(team, hints)
else:

@@ -2423,6 +2512,11 @@ class ServerCommandProcessor(CommonCommandProcessor):
elif value_type == str and option_name.endswith("password"):
def value_type(input_text: str):
return None if input_text.lower() in {"null", "none", '""', "''"} else input_text
elif option_name == "countdown_mode":
valid_values = {"enabled", "disabled", "auto"}
if option_value.lower() not in valid_values:
self.output(f"Unrecognized {option_name} value '{option_value}', known: {', '.join(valid_values)}")
return False
elif value_type == str and option_name.endswith("mode"):
valid_values = {"goal", "enabled", "disabled"}
valid_values.update(("auto", "auto_enabled") if option_name != "remaining_mode" else [])

@@ -2510,6 +2604,13 @@ def parse_args() -> argparse.Namespace:
goal: !collect can be used after goal completion
auto-enabled: !collect is available and automatically triggered on goal completion
''')
parser.add_argument('--countdown_mode', default=defaults["countdown_mode"], nargs='?',
choices=['enabled', 'disabled', "auto"], help='''\
Select !countdown Accessibility. (default: %(default)s)
enabled: !countdown is always available
disabled: !countdown is never available
auto: !countdown is available for rooms with less than 30 players
''')
parser.add_argument('--remaining_mode', default=defaults["remaining_mode"], nargs='?',
choices=['enabled', 'disabled', "goal"], help='''\
Select !remaining Accessibility. (default: %(default)s)

@@ -2575,7 +2676,7 @@ async def main(args: argparse.Namespace):
ctx = Context(args.host, args.port, args.server_password, args.password, args.location_check_points,
args.hint_cost, not args.disable_item_cheat, args.release_mode, args.collect_mode,
args.remaining_mode,
args.countdown_mode, args.remaining_mode,
args.auto_shutdown, args.compatibility, args.log_network)
data_filename = args.multidata

@@ -2610,7 +2711,13 @@ async def main(args: argparse.Namespace):
ssl_context = load_server_cert(args.cert, args.cert_key) if args.cert else None

ctx.server = websockets.serve(functools.partial(server, ctx=ctx), host=ctx.host, port=ctx.port, ssl=ssl_context)
ctx.server = websockets.serve(
functools.partial(server, ctx=ctx),
host=ctx.host,
port=ctx.port,
ssl=ssl_context,
extensions=[server_per_message_deflate_factory],
)
ip = args.host if args.host else Utils.get_public_ipv4()
logging.info('Hosting game at %s:%d (%s)' % (ip, ctx.port,
'No password' if not ctx.password else 'Password: %s' % ctx.password))

@@ -174,6 +174,8 @@ decode = JSONDecoder(object_hook=_object_hook).decode

class Endpoint:
__slots__ = ("socket",)

socket: "ServerConnection"

def __init__(self, socket):
Options.py (14 lines changed)
@@ -1380,7 +1380,7 @@ class NonLocalItems(ItemSet):

class StartInventory(ItemDict):
"""Start with these items."""
"""Start with the specified amount of these items. Example: "Bomb: 1" """
verify_item_name = True
display_name = "Start Inventory"
rich_text_doc = True

@@ -1388,7 +1388,7 @@ class StartInventory(ItemDict):

class StartInventoryPool(StartInventory):
"""Start with these items and don't place them in the world.
"""Start with the specified amount of these items and don't place them in the world. Example: "Bomb: 1"

The game decides what the replacement items will be.
"""

@@ -1446,6 +1446,7 @@ class ItemLinks(OptionList):
Optional("local_items"): [And(str, len)],
Optional("non_local_items"): [And(str, len)],
Optional("link_replacement"): Or(None, bool),
Optional("skip_if_solo"): Or(None, bool),
}
])

@@ -1473,8 +1474,10 @@ class ItemLinks(OptionList):
super(ItemLinks, self).verify(world, player_name, plando_options)
existing_links = set()
for link in self.value:
link["name"] = link["name"].strip()[:16].strip()
if link["name"] in existing_links:
raise Exception(f"You cannot have more than one link named {link['name']}.")
raise Exception(f"Item link names are limited to their first 16 characters and must be unique. "
f"You have more than one link named '{link['name']}'.")
existing_links.add(link["name"])

pool = self.verify_items(link["item_pool"], link["name"], "item_pool", world)
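The reworded error reflects that link names are trimmed to their first 16 characters before the uniqueness check, so two long names sharing a prefix now collide with a clearer message. The trimming expression in isolation, applied to a made-up name:

```python
name = "  My Very Long Item Link Name  "
trimmed = name.strip()[:16].strip()  # same expression as in ItemLinks.verify above
print(trimmed)       # "My Very Long Ite"
print(len(trimmed))  # 16
```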
@@ -1752,7 +1755,10 @@ def generate_yaml_templates(target_folder: typing.Union[str, "pathlib.Path"], ge

res = template.render(
option_groups=option_groups,
__version__=__version__, game=game_name, yaml_dump=yaml_dump_scalar,
__version__=__version__,
game=game_name,
world_version=world.world_version.as_simple_string(),
yaml_dump=yaml_dump_scalar,
dictify_range=dictify_range,
cleandoc=cleandoc,
)
@@ -20,7 +20,6 @@ Currently, the following games are supported:
* Meritous
* Super Metroid/Link to the Past combo randomizer (SMZ3)
* ChecksFinder
* ArchipIDLE
* Hollow Knight
* The Witness
* Sonic Adventure 2: Battle

@@ -81,6 +80,8 @@ Currently, the following games are supported:
* Super Mario Land 2: 6 Golden Coins
* shapez
* Paint
* Celeste (Open World)
* Choo-Choo Charles

For setup and instructions check out our [tutorials page](https://archipelago.gg/tutorial/).
Downloads can be found at [Releases](https://github.com/ArchipelagoMW/Archipelago/releases), including compiled
@@ -18,7 +18,7 @@ from json import loads, dumps
from CommonClient import CommonContext, server_loop, ClientCommandProcessor, gui_enabled, get_base_parser

import Utils
from settings import Settings
import settings
from Utils import async_start
from MultiServer import mark_raw
if typing.TYPE_CHECKING:

@@ -286,7 +286,7 @@ class SNESState(enum.IntEnum):

def launch_sni() -> None:
sni_path = Settings.sni_options.sni_path
sni_path = settings.get_settings().sni_options.sni_path

if not os.path.isdir(sni_path):
sni_path = Utils.local_path(sni_path)

@@ -669,7 +669,7 @@ async def game_watcher(ctx: SNIContext) -> None:

async def run_game(romfile: str) -> None:
auto_start = Settings.sni_options.snes_rom_start
auto_start = settings.get_settings().sni_options.snes_rom_start
if auto_start is True:
import webbrowser
webbrowser.open(romfile)
@@ -1,11 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import ModuleUpdate
|
||||
ModuleUpdate.update()
|
||||
|
||||
from worlds.sc2.Client import launch
|
||||
import Utils
|
||||
|
||||
if __name__ == "__main__":
|
||||
Utils.init_logging("Starcraft2Client", exception_logger="Client")
|
||||
launch()
|
||||
90
Utils.py
@@ -1,6 +1,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import concurrent.futures
|
||||
import json
|
||||
import typing
|
||||
import builtins
|
||||
@@ -35,7 +36,7 @@ if typing.TYPE_CHECKING:


def tuplize_version(version: str) -> Version:
    return Version(*(int(piece, 10) for piece in version.split(".")))
    return Version(*(int(piece) for piece in version.split(".")))

class Version(typing.NamedTuple):
|
||||
@@ -47,7 +48,7 @@ class Version(typing.NamedTuple):
|
||||
return ".".join(str(item) for item in self)
|
||||
|
||||
|
||||
__version__ = "0.6.3"
|
||||
__version__ = "0.6.4"
|
||||
version_tuple = tuplize_version(__version__)
|
||||
|
||||
is_linux = sys.platform.startswith("linux")
|
||||
@@ -322,11 +323,13 @@ def get_options() -> Settings:
    return get_settings()


def persistent_store(category: str, key: str, value: typing.Any):
    path = user_path("_persistent_storage.yaml")
def persistent_store(category: str, key: str, value: typing.Any, force_store: bool = False):
    storage = persistent_load()
    if not force_store and category in storage and key in storage[category] and storage[category][key] == value:
        return  # no changes necessary
    category_dict = storage.setdefault(category, {})
    category_dict[key] = value
    path = user_path("_persistent_storage.yaml")
    with open(path, "wt") as f:
        f.write(dump(storage, Dumper=Dumper))

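A minimal usage sketch of the `persistent_store` change above, assuming the helpers are imported from Utils as in the hunk; the category, key, and value are made-up examples:

```python
# Hypothetical example of the new force_store behaviour; category/key/value are illustrative only.
from Utils import persistent_store, persistent_load

persistent_store("client", "last_server", "archipelago.gg:38281")                    # writes to _persistent_storage.yaml
persistent_store("client", "last_server", "archipelago.gg:38281")                    # unchanged value: early return, no disk write
persistent_store("client", "last_server", "archipelago.gg:38281", force_store=True)  # writes even though nothing changed

print(persistent_load()["client"]["last_server"])
```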
@@ -414,11 +417,11 @@ def get_adjuster_settings(game_name: str) -> Namespace:
|
||||
@cache_argsless
|
||||
def get_unique_identifier():
|
||||
common_path = cache_path("common.json")
|
||||
if os.path.exists(common_path):
|
||||
try:
|
||||
with open(common_path) as f:
|
||||
common_file = json.load(f)
|
||||
uuid = common_file.get("uuid", None)
|
||||
else:
|
||||
except FileNotFoundError:
|
||||
common_file = {}
|
||||
uuid = None
|
||||
|
||||
@@ -428,6 +431,9 @@ def get_unique_identifier():
|
||||
from uuid import uuid4
|
||||
uuid = str(uuid4())
|
||||
common_file["uuid"] = uuid
|
||||
|
||||
cache_folder = os.path.dirname(common_path)
|
||||
os.makedirs(cache_folder, exist_ok=True)
|
||||
with open(common_path, "w") as f:
|
||||
json.dump(common_file, f, separators=(",", ":"))
|
||||
return uuid
|
||||
@@ -472,7 +478,7 @@ class RestrictedUnpickler(pickle.Unpickler):
|
||||
mod = importlib.import_module(module)
|
||||
obj = getattr(mod, name)
|
||||
if issubclass(obj, (self.options_module.Option, self.options_module.PlandoConnection,
|
||||
self.options_module.PlandoText)):
|
||||
self.options_module.PlandoItem, self.options_module.PlandoText)):
|
||||
return obj
|
||||
# Forbid everything else.
|
||||
raise pickle.UnpicklingError(f"global '{module}.{name}' is forbidden")
|
||||
@@ -715,13 +721,22 @@ def get_intended_text(input_text: str, possible_answers) -> typing.Tuple[str, bo
|
||||
|
||||
|
||||
def get_input_text_from_response(text: str, command: str) -> typing.Optional[str]:
|
||||
"""
|
||||
Parses the response text from `get_intended_text` to find the suggested input and autocomplete the command in
|
||||
arguments with it.
|
||||
|
||||
:param text: The response text from `get_intended_text`.
|
||||
:param command: The command to which the input text should be added. Must contain the prefix used by the command
|
||||
(`!` or `/`).
|
||||
:return: The command with the suggested input text appended, or None if no suggestion was found.
|
||||
"""
|
||||
if "did you mean " in text:
|
||||
for question in ("Didn't find something that closely matches",
|
||||
"Too many close matches"):
|
||||
if text.startswith(question):
|
||||
name = get_text_between(text, "did you mean '",
|
||||
"'? (")
|
||||
return f"!{command} {name}"
|
||||
return f"{command} {name}"
|
||||
elif text.startswith("Missing: "):
|
||||
return text.replace("Missing: ", "!hint_location ")
|
||||
return None
|
||||
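A short sketch of the behaviour documented above, assuming `get_input_text_from_response` is imported from Utils; the response string is fabricated to match the "did you mean" shape the function looks for:

```python
from Utils import get_input_text_from_response

# Fabricated response text in the shape produced by get_intended_text.
response = "Didn't find something that closely matches 'bomms', did you mean 'Bombs'? (80% sure)"
print(get_input_text_from_response(response, "!hint"))   # -> "!hint Bombs"
print(get_input_text_from_response("Found.", "!hint"))   # -> None, no suggestion present
```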
@@ -900,7 +915,7 @@ def async_start(co: Coroutine[None, None, typing.Any], name: Optional[str] = Non
|
||||
Use this to start a task when you don't keep a reference to it or immediately await it,
|
||||
to prevent early garbage collection. "fire-and-forget"
|
||||
"""
|
||||
# https://docs.python.org/3.10/library/asyncio-task.html#asyncio.create_task
|
||||
# https://docs.python.org/3.11/library/asyncio-task.html#asyncio.create_task
|
||||
# Python docs:
|
||||
# ```
|
||||
# Important: Save a reference to the result of [asyncio.create_task],
|
||||
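For context, a small sketch of the fire-and-forget pattern this docstring describes, assuming `async_start` is imported from Utils; the coroutine is a stand-in:

```python
import asyncio
from Utils import async_start

async def poll_once() -> None:
    await asyncio.sleep(0.1)  # stand-in for background client work

async def main() -> None:
    async_start(poll_once(), name="poll_once")  # reference kept internally, so no early garbage collection
    await asyncio.sleep(0.2)                    # give the fire-and-forget task time to finish

asyncio.run(main())
```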
@@ -937,15 +952,15 @@ class DeprecateDict(dict):
|
||||
|
||||
|
||||
def _extend_freeze_support() -> None:
|
||||
"""Extend multiprocessing.freeze_support() to also work on Non-Windows for spawn."""
|
||||
# upstream issue: https://github.com/python/cpython/issues/76327
|
||||
"""Extend multiprocessing.freeze_support() to also work on Non-Windows and without setting spawn method first."""
|
||||
# original upstream issue: https://github.com/python/cpython/issues/76327
|
||||
# code based on https://github.com/pyinstaller/pyinstaller/blob/develop/PyInstaller/hooks/rthooks/pyi_rth_multiprocessing.py#L26
|
||||
import multiprocessing
|
||||
import multiprocessing.spawn
|
||||
|
||||
def _freeze_support() -> None:
|
||||
"""Minimal freeze_support. Only apply this if frozen."""
|
||||
from subprocess import _args_from_interpreter_flags
|
||||
from subprocess import _args_from_interpreter_flags # noqa
|
||||
|
||||
# Prevent `spawn` from trying to read `__main__` in from the main script
|
||||
multiprocessing.process.ORIGINAL_DIR = None
|
||||
@@ -972,17 +987,23 @@ def _extend_freeze_support() -> None:
|
||||
multiprocessing.spawn.spawn_main(**kwargs)
|
||||
sys.exit()
|
||||
|
||||
if not is_windows and is_frozen():
|
||||
multiprocessing.freeze_support = multiprocessing.spawn.freeze_support = _freeze_support
|
||||
def _noop() -> None:
|
||||
pass
|
||||
|
||||
multiprocessing.freeze_support = multiprocessing.spawn.freeze_support = _freeze_support if is_frozen() else _noop
|
||||
|
||||
|
||||
def freeze_support() -> None:
|
||||
"""This behaves like multiprocessing.freeze_support but also works on Non-Windows."""
|
||||
"""This now only calls multiprocessing.freeze_support since we are patching freeze_support on module load."""
|
||||
import multiprocessing
|
||||
_extend_freeze_support()
|
||||
|
||||
deprecate("Use multiprocessing.freeze_support() instead")
|
||||
multiprocessing.freeze_support()
|
||||
|
||||
|
||||
_extend_freeze_support()
|
||||
|
||||
|
||||
def visualize_regions(root_region: Region, file_name: str, *,
|
||||
show_entrance_names: bool = False, show_locations: bool = True, show_other_regions: bool = True,
|
||||
linetype_ortho: bool = True, regions_to_highlight: set[Region] | None = None) -> None:
|
||||
@@ -1118,3 +1139,40 @@ def is_iterable_except_str(obj: object) -> TypeGuard[typing.Iterable[typing.Any]
|
||||
if isinstance(obj, str):
|
||||
return False
|
||||
return isinstance(obj, typing.Iterable)
|
||||
|
||||
|
||||
class DaemonThreadPoolExecutor(concurrent.futures.ThreadPoolExecutor):
    """
    ThreadPoolExecutor that uses daemonic threads that do not keep the program alive.
    NOTE: use this with caution because killed threads will not properly clean up.
    """

    def _adjust_thread_count(self):
        # see upstream ThreadPoolExecutor for details
        import threading
        import weakref
        from concurrent.futures.thread import _worker

        if self._idle_semaphore.acquire(timeout=0):
            return

        def weakref_cb(_, q=self._work_queue):
            q.put(None)

        num_threads = len(self._threads)
        if num_threads < self._max_workers:
            thread_name = f"{self._thread_name_prefix or self}_{num_threads}"
            t = threading.Thread(
                name=thread_name,
                target=_worker,
                args=(
                    weakref.ref(self, weakref_cb),
                    self._work_queue,
                    self._initializer,
                    self._initargs,
                ),
                daemon=True,
            )
            t.start()
            self._threads.add(t)
            # NOTE: don't add to _threads_queues so we don't block on shutdown

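A minimal sketch of how the executor above can be used with a timeout, mirroring (but not copied from) the WebHost generation flow further down; the work function is a placeholder:

```python
import concurrent.futures
from Utils import DaemonThreadPoolExecutor

def generate() -> str:
    return "done"  # placeholder for a long-running generation task

pool = DaemonThreadPoolExecutor(max_workers=1)
future = pool.submit(generate)
try:
    print(future.result(timeout=5))
except concurrent.futures.TimeoutError:
    # daemonic worker threads will not keep the process alive after shutdown
    pool.shutdown(wait=False, cancel_futures=True)
```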
@@ -99,16 +99,23 @@ if __name__ == "__main__":
|
||||
multiprocessing.set_start_method('spawn')
|
||||
logging.basicConfig(format='[%(asctime)s] %(message)s', level=logging.INFO)
|
||||
|
||||
from WebHostLib.lttpsprites import update_sprites_lttp
|
||||
from WebHostLib.autolauncher import autohost, autogen, stop
|
||||
from WebHostLib.options import create as create_options_files
|
||||
|
||||
try:
|
||||
from WebHostLib.lttpsprites import update_sprites_lttp
|
||||
update_sprites_lttp()
|
||||
except Exception as e:
|
||||
logging.exception(e)
|
||||
logging.warning("Could not update LttP sprites.")
|
||||
app = get_app()
|
||||
from worlds import AutoWorldRegister
|
||||
# Update to only valid WebHost worlds
|
||||
invalid_worlds = {name for name, world in AutoWorldRegister.world_types.items()
|
||||
if not hasattr(world.web, "tutorials")}
|
||||
if invalid_worlds:
|
||||
logging.error(f"Following worlds not loaded as they are invalid for WebHost: {invalid_worlds}")
|
||||
AutoWorldRegister.world_types = {k: v for k, v in AutoWorldRegister.world_types.items() if k not in invalid_worlds}
|
||||
create_options_files()
|
||||
copy_tutorials_files_to_static()
|
||||
if app.config["SELFLAUNCH"]:
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import base64
|
||||
import os
|
||||
import socket
|
||||
import typing
|
||||
import uuid
|
||||
|
||||
from flask import Flask
|
||||
@@ -61,20 +62,21 @@ cache = Cache()
Compress(app)


def to_python(value):
def to_python(value: str) -> uuid.UUID:
    return uuid.UUID(bytes=base64.urlsafe_b64decode(value + '=='))


def to_url(value):
def to_url(value: uuid.UUID) -> str:
    return base64.urlsafe_b64encode(value.bytes).rstrip(b'=').decode('ascii')


class B64UUIDConverter(BaseConverter):

    def to_python(self, value):
    def to_python(self, value: str) -> uuid.UUID:
        return to_python(value)

    def to_url(self, value):
    def to_url(self, value: typing.Any) -> str:
        assert isinstance(value, uuid.UUID)
        return to_url(value)
|
||||
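For reference, a self-contained round trip of the suuid encoding typed above, using only the standard library; it mirrors the module-level helpers rather than the Flask converter:

```python
import base64
import uuid

def to_url(value: uuid.UUID) -> str:
    return base64.urlsafe_b64encode(value.bytes).rstrip(b'=').decode('ascii')

def to_python(value: str) -> uuid.UUID:
    return uuid.UUID(bytes=base64.urlsafe_b64decode(value + '=='))

original = uuid.uuid4()
short = to_url(original)            # 22-character URL-safe form used in room/tracker links
assert to_python(short) == original
print(short)
```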
|
||||
@@ -84,7 +86,7 @@ app.jinja_env.filters["suuid"] = to_url
|
||||
app.jinja_env.filters["title_sorted"] = title_sorted
|
||||
|
||||
|
||||
def register():
|
||||
def register() -> None:
|
||||
"""Import submodules, triggering their registering on flask routing.
|
||||
Note: initializes worlds subsystem."""
|
||||
import importlib
|
||||
|
||||
@@ -11,5 +11,5 @@ api_endpoints = Blueprint('api', __name__, url_prefix="/api")
|
||||
def get_players(seed: Seed) -> List[Tuple[str, str]]:
|
||||
return [(slot.player_name, slot.game) for slot in seed.slots.order_by(Slot.player_id)]
|
||||
|
||||
|
||||
from . import datapackage, generate, room, user # trigger registration
|
||||
# trigger endpoint registration
|
||||
from . import datapackage, generate, room, tracker, user
|
||||
|
||||
241
WebHostLib/api/tracker.py
Normal file
@@ -0,0 +1,241 @@
|
||||
from datetime import datetime, timezone
|
||||
from typing import Any, TypedDict
|
||||
from uuid import UUID
|
||||
|
||||
from flask import abort
|
||||
|
||||
from NetUtils import ClientStatus, Hint, NetworkItem, SlotType
|
||||
from WebHostLib import cache
|
||||
from WebHostLib.api import api_endpoints
|
||||
from WebHostLib.models import Room
|
||||
from WebHostLib.tracker import TrackerData
|
||||
|
||||
|
||||
class PlayerAlias(TypedDict):
|
||||
team: int
|
||||
player: int
|
||||
alias: str | None
|
||||
|
||||
|
||||
class PlayerItemsReceived(TypedDict):
|
||||
team: int
|
||||
player: int
|
||||
items: list[NetworkItem]
|
||||
|
||||
|
||||
class PlayerChecksDone(TypedDict):
|
||||
team: int
|
||||
player: int
|
||||
locations: list[int]
|
||||
|
||||
|
||||
class TeamTotalChecks(TypedDict):
|
||||
team: int
|
||||
checks_done: int
|
||||
|
||||
|
||||
class PlayerHints(TypedDict):
|
||||
team: int
|
||||
player: int
|
||||
hints: list[Hint]
|
||||
|
||||
|
||||
class PlayerTimer(TypedDict):
|
||||
team: int
|
||||
player: int
|
||||
time: datetime | None
|
||||
|
||||
|
||||
class PlayerStatus(TypedDict):
|
||||
team: int
|
||||
player: int
|
||||
status: ClientStatus
|
||||
|
||||
|
||||
class PlayerLocationsTotal(TypedDict):
|
||||
team: int
|
||||
player: int
|
||||
total_locations: int
|
||||
|
||||
|
||||
@api_endpoints.route("/tracker/<suuid:tracker>")
|
||||
@cache.memoize(timeout=60)
|
||||
def tracker_data(tracker: UUID) -> dict[str, Any]:
|
||||
"""
|
||||
Outputs json data to <root_path>/api/tracker/<id of current session tracker>.
|
||||
|
||||
:param tracker: UUID of current session tracker.
|
||||
|
||||
:return: Tracking data for all players in the room. Typing and docstrings describe the format of each value.
|
||||
"""
|
||||
room: Room | None = Room.get(tracker=tracker)
|
||||
if not room:
|
||||
abort(404)
|
||||
|
||||
tracker_data = TrackerData(room)
|
||||
|
||||
all_players: dict[int, list[int]] = tracker_data.get_all_players()
|
||||
|
||||
player_aliases: list[PlayerAlias] = []
|
||||
"""Slot aliases of all players."""
|
||||
for team, players in all_players.items():
|
||||
for player in players:
|
||||
player_aliases.append({"team": team, "player": player, "alias": tracker_data.get_player_alias(team, player)})
|
||||
|
||||
player_items_received: list[PlayerItemsReceived] = []
|
||||
"""Items received by each player."""
|
||||
for team, players in all_players.items():
|
||||
for player in players:
|
||||
player_items_received.append(
|
||||
{"team": team, "player": player, "items": tracker_data.get_player_received_items(team, player)})
|
||||
|
||||
player_checks_done: list[PlayerChecksDone] = []
|
||||
"""ID of all locations checked by each player."""
|
||||
for team, players in all_players.items():
|
||||
for player in players:
|
||||
player_checks_done.append(
|
||||
{"team": team, "player": player, "locations": sorted(tracker_data.get_player_checked_locations(team, player))})
|
||||
|
||||
total_checks_done: list[TeamTotalChecks] = [
|
||||
{"team": team, "checks_done": checks_done}
|
||||
for team, checks_done in tracker_data.get_team_locations_checked_count().items()
|
||||
]
|
||||
"""Total number of locations checked for the entire multiworld per team."""
|
||||
|
||||
hints: list[PlayerHints] = []
|
||||
"""Hints that all players have used or received."""
|
||||
for team, players in tracker_data.get_all_slots().items():
|
||||
for player in players:
|
||||
player_hints = sorted(tracker_data.get_player_hints(team, player))
|
||||
hints.append({"team": team, "player": player, "hints": player_hints})
|
||||
slot_info = tracker_data.get_slot_info(player)
|
||||
# this assumes groups are always after players
|
||||
if slot_info.type != SlotType.group:
|
||||
continue
|
||||
for member in slot_info.group_members:
|
||||
hints[member - 1]["hints"] += player_hints
|
||||
|
||||
activity_timers: list[PlayerTimer] = []
|
||||
"""Time of last activity per player. Returned as RFC 1123 format and null if no connection has been made."""
|
||||
for team, players in all_players.items():
|
||||
for player in players:
|
||||
activity_timers.append({"team": team, "player": player, "time": None})
|
||||
|
||||
for (team, player), timestamp in tracker_data._multisave.get("client_activity_timers", []):
|
||||
for entry in activity_timers:
|
||||
if entry["team"] == team and entry["player"] == player:
|
||||
entry["time"] = datetime.fromtimestamp(timestamp, timezone.utc)
|
||||
break
|
||||
|
||||
connection_timers: list[PlayerTimer] = []
|
||||
"""Time of last connection per player. Returned as RFC 1123 format and null if no connection has been made."""
|
||||
for team, players in all_players.items():
|
||||
for player in players:
|
||||
connection_timers.append({"team": team, "player": player, "time": None})
|
||||
|
||||
for (team, player), timestamp in tracker_data._multisave.get("client_connection_timers", []):
|
||||
# find the matching entry
|
||||
for entry in connection_timers:
|
||||
if entry["team"] == team and entry["player"] == player:
|
||||
entry["time"] = datetime.fromtimestamp(timestamp, timezone.utc)
|
||||
break
|
||||
|
||||
player_status: list[PlayerStatus] = []
|
||||
"""The current client status for each player."""
|
||||
for team, players in all_players.items():
|
||||
for player in players:
|
||||
player_status.append({"team": team, "player": player, "status": tracker_data.get_player_client_status(team, player)})
|
||||
|
||||
return {
|
||||
"aliases": player_aliases,
|
||||
"player_items_received": player_items_received,
|
||||
"player_checks_done": player_checks_done,
|
||||
"total_checks_done": total_checks_done,
|
||||
"hints": hints,
|
||||
"activity_timers": activity_timers,
|
||||
"connection_timers": connection_timers,
|
||||
"player_status": player_status,
|
||||
}
|
||||
|
||||
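A hedged sketch of consuming the new endpoint from a client script; the host and the suuid value are placeholders, and the keys match the return dict above:

```python
import json
import urllib.request

tracker_suuid = "E-wwL_A0TEuUvJu02eOiSg"  # placeholder suuid, as produced by the B64UUID converter
with urllib.request.urlopen(f"https://archipelago.gg/api/tracker/{tracker_suuid}") as response:
    data = json.load(response)

for entry in data["player_checks_done"]:
    print(entry["team"], entry["player"], len(entry["locations"]), "locations checked")
```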
|
||||
class PlayerGroups(TypedDict):
|
||||
slot: int
|
||||
name: str
|
||||
members: list[int]
|
||||
|
||||
|
||||
class PlayerSlotData(TypedDict):
|
||||
player: int
|
||||
slot_data: dict[str, Any]
|
||||
|
||||
|
||||
@api_endpoints.route("/static_tracker/<suuid:tracker>")
|
||||
@cache.memoize(timeout=300)
|
||||
def static_tracker_data(tracker: UUID) -> dict[str, Any]:
|
||||
"""
|
||||
Outputs json data to <root_path>/api/static_tracker/<id of current session tracker>.
|
||||
|
||||
:param tracker: UUID of current session tracker.
|
||||
|
||||
:return: Static tracking data for all players in the room. Typing and docstrings describe the format of each value.
|
||||
"""
|
||||
room: Room | None = Room.get(tracker=tracker)
|
||||
if not room:
|
||||
abort(404)
|
||||
tracker_data = TrackerData(room)
|
||||
|
||||
all_players: dict[int, list[int]] = tracker_data.get_all_players()
|
||||
|
||||
groups: list[PlayerGroups] = []
|
||||
"""The Slot ID of groups and the IDs of the group's members."""
|
||||
for team, players in tracker_data.get_all_slots().items():
|
||||
for player in players:
|
||||
slot_info = tracker_data.get_slot_info(player)
|
||||
if slot_info.type != SlotType.group or not slot_info.group_members:
|
||||
continue
|
||||
groups.append(
|
||||
{
|
||||
"slot": player,
|
||||
"name": slot_info.name,
|
||||
"members": list(slot_info.group_members),
|
||||
})
|
||||
break
|
||||
|
||||
player_locations_total: list[PlayerLocationsTotal] = []
|
||||
for team, players in all_players.items():
|
||||
for player in players:
|
||||
player_locations_total.append(
|
||||
{"team": team, "player": player, "total_locations": len(tracker_data.get_player_locations(player))})
|
||||
|
||||
return {
|
||||
"groups": groups,
|
||||
"datapackage": tracker_data._multidata["datapackage"],
|
||||
"player_locations_total": player_locations_total,
|
||||
}
|
||||
|
||||
# It should be exceedingly rare that slot data is needed, so it's separated out.
|
||||
@api_endpoints.route("/slot_data_tracker/<suuid:tracker>")
|
||||
@cache.memoize(timeout=300)
|
||||
def tracker_slot_data(tracker: UUID) -> list[PlayerSlotData]:
|
||||
"""
|
||||
Outputs json data to <root_path>/api/slot_data_tracker/<id of current session tracker>.
|
||||
|
||||
:param tracker: UUID of current session tracker.
|
||||
|
||||
:return: Slot data for all players in the room. Typing completely arbitrary per game.
|
||||
"""
|
||||
room: Room | None = Room.get(tracker=tracker)
|
||||
if not room:
|
||||
abort(404)
|
||||
tracker_data = TrackerData(room)
|
||||
|
||||
all_players: dict[int, list[int]] = tracker_data.get_all_players()
|
||||
|
||||
slot_data: list[PlayerSlotData] = []
|
||||
"""Slot data for each player."""
|
||||
for team, players in all_players.items():
|
||||
for player in players:
|
||||
slot_data.append({"player": player, "slot_data": tracker_data.get_slot_data(player)})
|
||||
break
|
||||
|
||||
return slot_data
|
||||
@@ -17,7 +17,7 @@ from .locker import Locker, AlreadyRunningException
|
||||
_stop_event = Event()
|
||||
|
||||
|
||||
def stop():
|
||||
def stop() -> None:
|
||||
"""Stops previously launched threads"""
|
||||
global _stop_event
|
||||
stop_event = _stop_event
|
||||
@@ -36,25 +36,39 @@ def handle_generation_failure(result: BaseException):
|
||||
logging.exception(e)
|
||||
|
||||
|
||||
def _mp_gen_game(gen_options: dict, meta: dict[str, Any] | None = None, owner=None, sid=None) -> PrimaryKey | None:
|
||||
def _mp_gen_game(
|
||||
gen_options: dict,
|
||||
meta: dict[str, Any] | None = None,
|
||||
owner=None,
|
||||
sid=None,
|
||||
timeout: int|None = None,
|
||||
) -> PrimaryKey | None:
|
||||
from setproctitle import setproctitle
|
||||
|
||||
setproctitle(f"Generator ({sid})")
|
||||
res = gen_game(gen_options, meta=meta, owner=owner, sid=sid)
|
||||
setproctitle(f"Generator (idle)")
|
||||
return res
|
||||
try:
|
||||
return gen_game(gen_options, meta=meta, owner=owner, sid=sid, timeout=timeout)
|
||||
finally:
|
||||
setproctitle(f"Generator (idle)")
|
||||
|
||||
|
||||
def launch_generator(pool: multiprocessing.pool.Pool, generation: Generation):
|
||||
def launch_generator(pool: multiprocessing.pool.Pool, generation: Generation, timeout: int|None) -> None:
|
||||
try:
|
||||
meta = json.loads(generation.meta)
|
||||
options = restricted_loads(generation.options)
|
||||
logging.info(f"Generating {generation.id} for {len(options)} players")
|
||||
pool.apply_async(_mp_gen_game, (options,),
|
||||
{"meta": meta,
|
||||
"sid": generation.id,
|
||||
"owner": generation.owner},
|
||||
handle_generation_success, handle_generation_failure)
|
||||
pool.apply_async(
|
||||
_mp_gen_game,
|
||||
(options,),
|
||||
{
|
||||
"meta": meta,
|
||||
"sid": generation.id,
|
||||
"owner": generation.owner,
|
||||
"timeout": timeout,
|
||||
},
|
||||
handle_generation_success,
|
||||
handle_generation_failure,
|
||||
)
|
||||
except Exception as e:
|
||||
generation.state = STATE_ERROR
|
||||
commit()
|
||||
@@ -135,6 +149,7 @@ def autogen(config: dict):
|
||||
|
||||
with multiprocessing.Pool(config["GENERATORS"], initializer=init_generator,
|
||||
initargs=(config,), maxtasksperchild=10) as generator_pool:
|
||||
job_time = config["JOB_TIME"]
|
||||
with db_session:
|
||||
to_start = select(generation for generation in Generation if generation.state == STATE_STARTED)
|
||||
|
||||
@@ -145,7 +160,7 @@ def autogen(config: dict):
|
||||
if sid:
|
||||
generation.delete()
|
||||
else:
|
||||
launch_generator(generator_pool, generation)
|
||||
launch_generator(generator_pool, generation, timeout=job_time)
|
||||
|
||||
commit()
|
||||
select(generation for generation in Generation if generation.state == STATE_ERROR).delete()
|
||||
@@ -157,7 +172,7 @@ def autogen(config: dict):
|
||||
generation for generation in Generation
|
||||
if generation.state == STATE_QUEUED).for_update()
|
||||
for generation in to_start:
|
||||
launch_generator(generator_pool, generation)
|
||||
launch_generator(generator_pool, generation, timeout=job_time)
|
||||
except AlreadyRunningException:
|
||||
logging.info("Autogen reports as already running, not starting another.")
|
||||
|
||||
|
||||
@@ -19,7 +19,10 @@ from pony.orm import commit, db_session, select
|
||||
|
||||
import Utils
|
||||
|
||||
from MultiServer import Context, server, auto_shutdown, ServerCommandProcessor, ClientMessageProcessor, load_server_cert
|
||||
from MultiServer import (
|
||||
Context, server, auto_shutdown, ServerCommandProcessor, ClientMessageProcessor, load_server_cert,
|
||||
server_per_message_deflate_factory,
|
||||
)
|
||||
from Utils import restricted_loads, cache_argsless
|
||||
from .locker import Locker
|
||||
from .models import Command, GameDataPackage, Room, db
|
||||
@@ -97,6 +100,7 @@ class WebHostContext(Context):
|
||||
self.main_loop.call_soon_threadsafe(cmdprocessor, command.commandtext)
|
||||
command.delete()
|
||||
commit()
|
||||
del commands
|
||||
time.sleep(5)
|
||||
|
||||
@db_session
|
||||
@@ -146,13 +150,13 @@ class WebHostContext(Context):
|
||||
self.location_name_groups = static_location_name_groups
|
||||
return self._load(multidata, game_data_packages, True)
|
||||
|
||||
@db_session
|
||||
def init_save(self, enabled: bool = True):
|
||||
self.saving = enabled
|
||||
if self.saving:
|
||||
savegame_data = Room.get(id=self.room_id).multisave
|
||||
if savegame_data:
|
||||
self.set_save(restricted_loads(Room.get(id=self.room_id).multisave))
|
||||
with db_session:
|
||||
savegame_data = Room.get(id=self.room_id).multisave
|
||||
if savegame_data:
|
||||
self.set_save(restricted_loads(Room.get(id=self.room_id).multisave))
|
||||
self._start_async_saving(atexit_save=False)
|
||||
threading.Thread(target=self.listen_to_db_commands, daemon=True).start()
|
||||
|
||||
@@ -282,8 +286,12 @@ def run_server_process(name: str, ponyconfig: dict, static_server_data: dict,
|
||||
assert ctx.server is None
|
||||
try:
|
||||
ctx.server = websockets.serve(
|
||||
functools.partial(server, ctx=ctx), ctx.host, ctx.port, ssl=get_ssl_context())
|
||||
|
||||
functools.partial(server, ctx=ctx),
|
||||
ctx.host,
|
||||
ctx.port,
|
||||
ssl=get_ssl_context(),
|
||||
extensions=[server_per_message_deflate_factory],
|
||||
)
|
||||
await ctx.server
|
||||
except OSError: # likely port in use
|
||||
ctx.server = websockets.serve(
|
||||
@@ -304,6 +312,7 @@ def run_server_process(name: str, ponyconfig: dict, static_server_data: dict,
|
||||
with db_session:
|
||||
room = Room.get(id=ctx.room_id)
|
||||
room.last_port = port
|
||||
del room
|
||||
else:
|
||||
ctx.logger.exception("Could not determine port. Likely hosting failure.")
|
||||
with db_session:
|
||||
@@ -322,6 +331,7 @@ def run_server_process(name: str, ponyconfig: dict, static_server_data: dict,
|
||||
with db_session:
|
||||
room = Room.get(id=room_id)
|
||||
room.last_port = -1
|
||||
del room
|
||||
logger.exception(e)
|
||||
raise
|
||||
else:
|
||||
@@ -333,11 +343,12 @@ def run_server_process(name: str, ponyconfig: dict, static_server_data: dict,
|
||||
ctx.save_dirty = False # make sure the saving thread does not write to DB after final wakeup
|
||||
ctx.exit_event.set() # make sure the saving thread stops at some point
|
||||
# NOTE: async saving should probably be an async task and could be merged with shutdown_task
|
||||
with (db_session):
|
||||
with db_session:
|
||||
# ensure the Room does not spin up again on its own, minute of safety buffer
|
||||
room = Room.get(id=room_id)
|
||||
room.last_activity = datetime.datetime.utcnow() - \
|
||||
datetime.timedelta(minutes=1, seconds=room.timeout)
|
||||
del room
|
||||
logging.info(f"Shutting down room {room_id} on {name}.")
|
||||
finally:
|
||||
await asyncio.sleep(5)
|
||||
|
||||
@@ -12,12 +12,11 @@ from flask import flash, redirect, render_template, request, session, url_for
|
||||
from pony.orm import commit, db_session
|
||||
|
||||
from BaseClasses import get_seed, seeddigits
|
||||
from Generate import PlandoOptions, handle_name
|
||||
from Generate import PlandoOptions, handle_name, mystery_argparse
|
||||
from Main import main as ERmain
|
||||
from Utils import __version__, restricted_dumps
|
||||
from Utils import __version__, restricted_dumps, DaemonThreadPoolExecutor
|
||||
from WebHostLib import app
|
||||
from settings import ServerOptions, GeneratorOptions
|
||||
from worlds.alttp.EntranceRandomizer import parse_arguments
|
||||
from .check import get_yaml_data, roll_options
|
||||
from .models import Generation, STATE_ERROR, STATE_QUEUED, Seed, UUID
|
||||
from .upload import upload_zip_to_db
|
||||
@@ -34,6 +33,7 @@ def get_meta(options_source: dict, race: bool = False) -> dict[str, list[str] |
|
||||
"release_mode": str(options_source.get("release_mode", ServerOptions.release_mode)),
|
||||
"remaining_mode": str(options_source.get("remaining_mode", ServerOptions.remaining_mode)),
|
||||
"collect_mode": str(options_source.get("collect_mode", ServerOptions.collect_mode)),
|
||||
"countdown_mode": str(options_source.get("countdown_mode", ServerOptions.countdown_mode)),
|
||||
"item_cheat": bool(int(options_source.get("item_cheat", not ServerOptions.disable_item_cheat))),
|
||||
"server_password": str(options_source.get("server_password", None)),
|
||||
}
|
||||
@@ -73,6 +73,10 @@ def generate(race=False):
|
||||
return render_template("generate.html", race=race, version=__version__)
|
||||
|
||||
|
||||
def format_exception(e: BaseException) -> str:
|
||||
return f"{e.__class__.__name__}: {e}"
|
||||
|
||||
|
||||
def start_generation(options: dict[str, dict | str], meta: dict[str, Any]):
|
||||
results, gen_options = roll_options(options, set(meta["plando_options"]))
|
||||
|
||||
@@ -93,7 +97,9 @@ def start_generation(options: dict[str, dict | str], meta: dict[str, Any]):
|
||||
except PicklingError as e:
|
||||
from .autolauncher import handle_generation_failure
|
||||
handle_generation_failure(e)
|
||||
return render_template("seedError.html", seed_error=("PicklingError: " + str(e)))
|
||||
meta["error"] = format_exception(e)
|
||||
details = json.dumps(meta, indent=4).strip()
|
||||
return render_template("seedError.html", seed_error=meta["error"], details=details)
|
||||
|
||||
commit()
|
||||
|
||||
@@ -101,16 +107,18 @@ def start_generation(options: dict[str, dict | str], meta: dict[str, Any]):
|
||||
else:
|
||||
try:
|
||||
seed_id = gen_game({name: vars(options) for name, options in gen_options.items()},
|
||||
meta=meta, owner=session["_id"].int)
|
||||
meta=meta, owner=session["_id"].int, timeout=app.config["JOB_TIME"])
|
||||
except BaseException as e:
|
||||
from .autolauncher import handle_generation_failure
|
||||
handle_generation_failure(e)
|
||||
return render_template("seedError.html", seed_error=(e.__class__.__name__ + ": " + str(e)))
|
||||
meta["error"] = format_exception(e)
|
||||
details = json.dumps(meta, indent=4).strip()
|
||||
return render_template("seedError.html", seed_error=meta["error"], details=details)
|
||||
|
||||
return redirect(url_for("view_seed", seed=seed_id))
|
||||
|
||||
|
||||
def gen_game(gen_options: dict, meta: dict[str, Any] | None = None, owner=None, sid=None):
|
||||
def gen_game(gen_options: dict, meta: dict[str, Any] | None = None, owner=None, sid=None, timeout: int|None = None):
|
||||
if meta is None:
|
||||
meta = {}
|
||||
|
||||
@@ -129,43 +137,47 @@ def gen_game(gen_options: dict, meta: dict[str, Any] | None = None, owner=None,
|
||||
|
||||
seedname = "W" + (f"{random.randint(0, pow(10, seeddigits) - 1)}".zfill(seeddigits))
|
||||
|
||||
erargs = parse_arguments(['--multi', str(playercount)])
|
||||
erargs.seed = seed
|
||||
erargs.name = {x: "" for x in range(1, playercount + 1)} # only so it can be overwritten in mystery
|
||||
erargs.spoiler = meta["generator_options"].get("spoiler", 0)
|
||||
erargs.race = race
|
||||
erargs.outputname = seedname
|
||||
erargs.outputpath = target.name
|
||||
erargs.teams = 1
|
||||
erargs.plando_options = PlandoOptions.from_set(meta.setdefault("plando_options",
|
||||
{"bosses", "items", "connections", "texts"}))
|
||||
erargs.skip_prog_balancing = False
|
||||
erargs.skip_output = False
|
||||
erargs.spoiler_only = False
|
||||
erargs.csv_output = False
|
||||
args = mystery_argparse([]) # Just to set up the Namespace with defaults
|
||||
args.multi = playercount
|
||||
args.seed = seed
|
||||
args.name = {x: "" for x in range(1, playercount + 1)} # only so it can be overwritten in mystery
|
||||
args.spoiler = meta["generator_options"].get("spoiler", 0)
|
||||
args.race = race
|
||||
args.outputname = seedname
|
||||
args.outputpath = target.name
|
||||
args.teams = 1
|
||||
args.plando_options = PlandoOptions.from_set(meta.setdefault("plando_options",
|
||||
{"bosses", "items", "connections", "texts"}))
|
||||
args.skip_prog_balancing = False
|
||||
args.skip_output = False
|
||||
args.spoiler_only = False
|
||||
args.csv_output = False
|
||||
args.sprite = dict.fromkeys(range(1, args.multi+1), None)
|
||||
args.sprite_pool = dict.fromkeys(range(1, args.multi+1), None)
|
||||
|
||||
name_counter = Counter()
|
||||
for player, (playerfile, settings) in enumerate(gen_options.items(), 1):
|
||||
for k, v in settings.items():
|
||||
if v is not None:
|
||||
if hasattr(erargs, k):
|
||||
getattr(erargs, k)[player] = v
|
||||
if hasattr(args, k):
|
||||
getattr(args, k)[player] = v
|
||||
else:
|
||||
setattr(erargs, k, {player: v})
|
||||
setattr(args, k, {player: v})
|
||||
|
||||
if not erargs.name[player]:
|
||||
erargs.name[player] = os.path.splitext(os.path.split(playerfile)[-1])[0]
|
||||
erargs.name[player] = handle_name(erargs.name[player], player, name_counter)
|
||||
if len(set(erargs.name.values())) != len(erargs.name):
|
||||
raise Exception(f"Names have to be unique. Names: {Counter(erargs.name.values())}")
|
||||
ERmain(erargs, seed, baked_server_options=meta["server_options"])
|
||||
if not args.name[player]:
|
||||
args.name[player] = os.path.splitext(os.path.split(playerfile)[-1])[0]
|
||||
args.name[player] = handle_name(args.name[player], player, name_counter)
|
||||
if len(set(args.name.values())) != len(args.name):
|
||||
raise Exception(f"Names have to be unique. Names: {Counter(args.name.values())}")
|
||||
ERmain(args, seed, baked_server_options=meta["server_options"])
|
||||
|
||||
return upload_to_db(target.name, sid, owner, race)
|
||||
thread_pool = concurrent.futures.ThreadPoolExecutor(max_workers=1)
|
||||
|
||||
thread_pool = DaemonThreadPoolExecutor(max_workers=1)
|
||||
thread = thread_pool.submit(task)
|
||||
|
||||
try:
|
||||
return thread.result(app.config["JOB_TIME"])
|
||||
return thread.result(timeout)
|
||||
except concurrent.futures.TimeoutError as e:
|
||||
if sid:
|
||||
with db_session:
|
||||
@@ -173,11 +185,14 @@ def gen_game(gen_options: dict, meta: dict[str, Any] | None = None, owner=None,
|
||||
if gen is not None:
|
||||
gen.state = STATE_ERROR
|
||||
meta = json.loads(gen.meta)
|
||||
meta["error"] = (
|
||||
"Allowed time for Generation exceeded, please consider generating locally instead. " +
|
||||
e.__class__.__name__ + ": " + str(e))
|
||||
meta["error"] = ("Allowed time for Generation exceeded, " +
|
||||
"please consider generating locally instead. " +
|
||||
format_exception(e))
|
||||
gen.meta = json.dumps(meta)
|
||||
commit()
|
||||
except (KeyboardInterrupt, SystemExit):
|
||||
# don't update db, retry next time
|
||||
raise
|
||||
except BaseException as e:
|
||||
if sid:
|
||||
with db_session:
|
||||
@@ -185,10 +200,15 @@ def gen_game(gen_options: dict, meta: dict[str, Any] | None = None, owner=None,
|
||||
if gen is not None:
|
||||
gen.state = STATE_ERROR
|
||||
meta = json.loads(gen.meta)
|
||||
meta["error"] = (e.__class__.__name__ + ": " + str(e))
|
||||
meta["error"] = format_exception(e)
|
||||
gen.meta = json.dumps(meta)
|
||||
commit()
|
||||
raise
|
||||
finally:
|
||||
# free resources claimed by thread pool, if possible
|
||||
# NOTE: Timeout depends on the process being killed at some point
|
||||
# since we can't actually cancel a running gen at the moment.
|
||||
thread_pool.shutdown(wait=False, cancel_futures=True)
|
||||
|
||||
|
||||
@app.route('/wait/<suuid:seed>')
|
||||
@@ -202,7 +222,9 @@ def wait_seed(seed: UUID):
|
||||
if not generation:
|
||||
return "Generation not found."
|
||||
elif generation.state == STATE_ERROR:
|
||||
return render_template("seedError.html", seed_error=generation.meta)
|
||||
meta = json.loads(generation.meta)
|
||||
details = json.dumps(meta, indent=4).strip()
|
||||
return render_template("seedError.html", seed_error=meta["error"], details=details)
|
||||
return render_template("waitSeed.html", seed_id=seed_id)
|
||||
|
||||
|
||||
|
||||
@@ -3,10 +3,10 @@ import threading
|
||||
import json
|
||||
|
||||
from Utils import local_path, user_path
|
||||
from worlds.alttp.Rom import Sprite
|
||||
|
||||
|
||||
def update_sprites_lttp():
|
||||
from worlds.alttp.Rom import Sprite
|
||||
from tkinter import Tk
|
||||
from LttPAdjuster import get_image_for_sprite
|
||||
from LttPAdjuster import BackgroundTaskProgress
|
||||
|
||||
90
WebHostLib/markdown.py
Normal file
@@ -0,0 +1,90 @@
|
||||
import re
|
||||
from collections import Counter
|
||||
|
||||
import mistune
|
||||
from werkzeug.utils import secure_filename
|
||||
|
||||
|
||||
__all__ = [
|
||||
"ImgUrlRewriteInlineParser",
|
||||
'render_markdown',
|
||||
]
|
||||
|
||||
|
||||
class ImgUrlRewriteInlineParser(mistune.InlineParser):
|
||||
relative_url_base: str
|
||||
|
||||
def __init__(self, relative_url_base: str, hard_wrap: bool = False) -> None:
|
||||
super().__init__(hard_wrap)
|
||||
self.relative_url_base = relative_url_base
|
||||
|
||||
@staticmethod
|
||||
def _find_game_name_by_folder_name(name: str) -> str | None:
|
||||
from worlds.AutoWorld import AutoWorldRegister
|
||||
|
||||
for world_name, world_type in AutoWorldRegister.world_types.items():
|
||||
if world_type.__module__ == f"worlds.{name}":
|
||||
return world_name
|
||||
return None
|
||||
|
||||
def parse_link(self, m: re.Match[str], state: mistune.InlineState) -> int | None:
|
||||
res = super().parse_link(m, state)
|
||||
if res is not None and state.tokens and state.tokens[-1]["type"] == "image":
|
||||
image_token = state.tokens[-1]
|
||||
url: str = image_token["attrs"]["url"]
|
||||
if not url.startswith("/") and not "://" in url:
|
||||
# replace relative URL to another world's doc folder with the webhost folder layout
|
||||
if url.startswith("../../") and "/docs/" in self.relative_url_base:
|
||||
parts = url.split("/", 4)
|
||||
if parts[2] != ".." and parts[3] == "docs":
|
||||
game_name = self._find_game_name_by_folder_name(parts[2])
|
||||
if game_name is not None:
|
||||
url = "/".join(parts[1:2] + [secure_filename(game_name)] + parts[4:])
|
||||
# change relative URL to point to deployment folder
|
||||
url = f"{self.relative_url_base}/{url}"
|
||||
image_token['attrs']['url'] = url
|
||||
return res
|
||||
|
||||
|
||||
def render_markdown(path: str, img_url_base: str | None = None) -> str:
|
||||
markdown = mistune.create_markdown(
|
||||
escape=False,
|
||||
plugins=[
|
||||
"strikethrough",
|
||||
"footnotes",
|
||||
"table",
|
||||
"speedup",
|
||||
],
|
||||
)
|
||||
|
||||
heading_id_count: Counter[str] = Counter()
|
||||
|
||||
def heading_id(text: str) -> str:
|
||||
nonlocal heading_id_count
|
||||
|
||||
# there is no good way to do this without regex
|
||||
s = re.sub(r"[^\w\- ]", "", text.lower()).replace(" ", "-").strip("-")
|
||||
n = heading_id_count[s]
|
||||
heading_id_count[s] += 1
|
||||
if n > 0:
|
||||
s += f"-{n}"
|
||||
return s
|
||||
|
||||
def id_hook(_: mistune.Markdown, state: mistune.BlockState) -> None:
|
||||
for tok in state.tokens:
|
||||
if tok["type"] == "heading" and tok["attrs"]["level"] < 4:
|
||||
text = tok["text"]
|
||||
assert isinstance(text, str)
|
||||
unique_id = heading_id(text)
|
||||
tok["attrs"]["id"] = unique_id
|
||||
tok["text"] = f"<a href=\"#{unique_id}\">{text}</a>" # make header link to itself
|
||||
|
||||
markdown.before_render_hooks.append(id_hook)
|
||||
if img_url_base:
|
||||
markdown.inline = ImgUrlRewriteInlineParser(img_url_base)
|
||||
|
||||
with open(path, encoding="utf-8-sig") as f:
|
||||
document = f.read()
|
||||
html = markdown(document)
|
||||
assert isinstance(html, str), "Unexpected mistune renderer in render_markdown"
|
||||
return html
|
||||
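A short usage sketch of the helper above, assuming it is imported as `WebHostLib.markdown.render_markdown` (as docs.py does below); the document path and URL base are placeholders:

```python
from WebHostLib.markdown import render_markdown

html = render_markdown(
    "WebHostLib/static/generated/docs/Archipelago/setup_en.md",  # placeholder document path
    img_url_base="/static/generated/docs/Archipelago",           # placeholder: relative image URLs get rewritten against this
)
print(html[:200])
```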
@@ -9,6 +9,7 @@ from werkzeug.utils import secure_filename
|
||||
|
||||
from worlds.AutoWorld import AutoWorldRegister, World
|
||||
from . import app, cache
|
||||
from .markdown import render_markdown
|
||||
from .models import Seed, Room, Command, UUID, uuid4
|
||||
from Utils import title_sorted
|
||||
|
||||
@@ -27,49 +28,6 @@ def get_visible_worlds() -> dict[str, type(World)]:
|
||||
return worlds
|
||||
|
||||
|
||||
def render_markdown(path: str) -> str:
|
||||
import mistune
|
||||
from collections import Counter
|
||||
|
||||
markdown = mistune.create_markdown(
|
||||
escape=False,
|
||||
plugins=[
|
||||
"strikethrough",
|
||||
"footnotes",
|
||||
"table",
|
||||
"speedup",
|
||||
],
|
||||
)
|
||||
|
||||
heading_id_count: Counter[str] = Counter()
|
||||
|
||||
def heading_id(text: str) -> str:
|
||||
nonlocal heading_id_count
|
||||
import re # there is no good way to do this without regex
|
||||
|
||||
s = re.sub(r"[^\w\- ]", "", text.lower()).replace(" ", "-").strip("-")
|
||||
n = heading_id_count[s]
|
||||
heading_id_count[s] += 1
|
||||
if n > 0:
|
||||
s += f"-{n}"
|
||||
return s
|
||||
|
||||
def id_hook(_: mistune.Markdown, state: mistune.BlockState) -> None:
|
||||
for tok in state.tokens:
|
||||
if tok["type"] == "heading" and tok["attrs"]["level"] < 4:
|
||||
text = tok["text"]
|
||||
assert isinstance(text, str)
|
||||
unique_id = heading_id(text)
|
||||
tok["attrs"]["id"] = unique_id
|
||||
tok["text"] = f"<a href=\"#{unique_id}\">{text}</a>" # make header link to itself
|
||||
|
||||
markdown.before_render_hooks.append(id_hook)
|
||||
|
||||
with open(path, encoding="utf-8-sig") as f:
|
||||
document = f.read()
|
||||
return markdown(document)
|
||||
|
||||
|
||||
@app.errorhandler(404)
|
||||
@app.errorhandler(jinja2.exceptions.TemplateNotFound)
|
||||
def page_not_found(err):
|
||||
@@ -91,10 +49,9 @@ def game_info(game, lang):
|
||||
theme = get_world_theme(game)
|
||||
secure_game_name = secure_filename(game)
|
||||
lang = secure_filename(lang)
|
||||
document = render_markdown(os.path.join(
|
||||
app.static_folder, "generated", "docs",
|
||||
secure_game_name, f"{lang}_{secure_game_name}.md"
|
||||
))
|
||||
file_dir = os.path.join(app.static_folder, "generated", "docs", secure_game_name)
|
||||
file_dir_url = url_for("static", filename=f"generated/docs/{secure_game_name}")
|
||||
document = render_markdown(os.path.join(file_dir, f"{lang}_{secure_game_name}.md"), file_dir_url)
|
||||
return render_template(
|
||||
"markdown_document.html",
|
||||
title=f"{game} Guide",
|
||||
@@ -119,10 +76,9 @@ def tutorial(game: str, file: str):
|
||||
theme = get_world_theme(game)
|
||||
secure_game_name = secure_filename(game)
|
||||
file = secure_filename(file)
|
||||
document = render_markdown(os.path.join(
|
||||
app.static_folder, "generated", "docs",
|
||||
secure_game_name, file+".md"
|
||||
))
|
||||
file_dir = os.path.join(app.static_folder, "generated", "docs", secure_game_name)
|
||||
file_dir_url = url_for("static", filename=f"generated/docs/{secure_game_name}")
|
||||
document = render_markdown(os.path.join(file_dir, f"{file}.md"), file_dir_url)
|
||||
return render_template(
|
||||
"markdown_document.html",
|
||||
title=f"{game} Guide",
|
||||
@@ -133,6 +89,15 @@ def tutorial(game: str, file: str):
|
||||
return abort(404)
|
||||
|
||||
|
||||
@app.route('/tutorial/<string:game>/<string:file>/<string:lang>')
|
||||
def tutorial_redirect(game: str, file: str, lang: str):
|
||||
"""
|
||||
Permanent redirect old tutorial URLs to new ones to keep search engines happy.
|
||||
e.g. /tutorial/Archipelago/setup/en -> /tutorial/Archipelago/setup_en
|
||||
"""
|
||||
return redirect(url_for("tutorial", game=game, file=f"{file}_{lang}"), code=301)
|
||||
|
||||
|
||||
@app.route('/tutorial/')
|
||||
@cache.cached()
|
||||
def tutorial_landing():
|
||||
@@ -251,7 +216,10 @@ def host_room(room: UUID):
|
||||
# indicate that the page should reload to get the assigned port
|
||||
should_refresh = ((not room.last_port and now - room.creation_time < datetime.timedelta(seconds=3))
|
||||
or room.last_activity < now - datetime.timedelta(seconds=room.timeout))
|
||||
with db_session:
|
||||
|
||||
if now - room.last_activity > datetime.timedelta(minutes=1):
|
||||
# we only set last_activity if needed, otherwise parallel access on /room will cause an internal server error
|
||||
# due to "pony.orm.core.OptimisticCheckError: Object Room was updated outside of current transaction"
|
||||
room.last_activity = now # will trigger a spinup, if it's not already running
|
||||
|
||||
browser_tokens = "Mozilla", "Chrome", "Safari"
|
||||
@@ -259,9 +227,9 @@ def host_room(room: UUID):
|
||||
or "Discordbot" in request.user_agent.string
|
||||
or not any(browser_token in request.user_agent.string for browser_token in browser_tokens))
|
||||
|
||||
def get_log(max_size: int = 0 if automated else 1024000) -> str:
|
||||
def get_log(max_size: int = 0 if automated else 1024000) -> Tuple[str, int]:
|
||||
if max_size == 0:
|
||||
return "…"
|
||||
return "…", 0
|
||||
try:
|
||||
with open(os.path.join("logs", str(room.id) + ".txt"), "rb") as log:
|
||||
raw_size = 0
|
||||
@@ -272,9 +240,9 @@ def host_room(room: UUID):
|
||||
break
|
||||
raw_size += len(block)
|
||||
fragments.append(block.decode("utf-8"))
|
||||
return "".join(fragments)
|
||||
return "".join(fragments), raw_size
|
||||
except FileNotFoundError:
|
||||
return ""
|
||||
return "", 0
|
||||
|
||||
return render_template("hostRoom.html", room=room, should_refresh=should_refresh, get_log=get_log)
|
||||
|
||||
|
||||
@@ -76,7 +76,7 @@ def filter_rst_to_html(text: str) -> str:
|
||||
lines = text.splitlines()
|
||||
text = lines[0] + "\n" + dedent("\n".join(lines[1:]))
|
||||
|
||||
return publish_parts(text, writer_name='html', settings=None, settings_overrides={
|
||||
return publish_parts(text, writer='html', settings=None, settings_overrides={
|
||||
'raw_enable': False,
|
||||
'file_insertion_enabled': False,
|
||||
'output_encoding': 'unicode'
|
||||
@@ -155,7 +155,9 @@ def generate_weighted_yaml(game: str):
|
||||
options = {}
|
||||
|
||||
for key, val in request.form.items():
|
||||
if "||" not in key:
|
||||
if val == "_ensure-empty-list":
|
||||
options[key] = {}
|
||||
elif "||" not in key:
|
||||
if len(str(val)) == 0:
|
||||
continue
|
||||
|
||||
@@ -212,8 +214,11 @@ def generate_yaml(game: str):
|
||||
if request.method == "POST":
|
||||
options = {}
|
||||
intent_generate = False
|
||||
|
||||
for key, val in request.form.items(multi=True):
|
||||
if key in options:
|
||||
if val == "_ensure-empty-list":
|
||||
options[key] = []
|
||||
elif options.get(key):
|
||||
if not isinstance(options[key], list):
|
||||
options[key] = [options[key]]
|
||||
options[key].append(val)
|
||||
@@ -226,7 +231,7 @@ def generate_yaml(game: str):
|
||||
if key_parts[-1] == "qty":
|
||||
if key_parts[0] not in options:
|
||||
options[key_parts[0]] = {}
|
||||
if val != "0":
|
||||
if val and val != "0":
|
||||
options[key_parts[0]][key_parts[1]] = int(val)
|
||||
del options[key]
|
||||
|
||||
|
||||
@@ -1,11 +1,13 @@
|
||||
flask>=3.1.1
|
||||
werkzeug>=3.1.3
|
||||
pony>=0.7.19
|
||||
pony>=0.7.19; python_version <= '3.12'
|
||||
pony @ git+https://github.com/black-sliver/pony@7feb1221953b7fa4a6735466bf21a8b4d35e33ba#0.7.19; python_version >= '3.13'
|
||||
waitress>=3.0.2
|
||||
Flask-Caching>=2.3.0
|
||||
Flask-Compress>=1.17
|
||||
Flask-Compress==1.18 # pkg_resources can't resolve the "backports.zstd" dependency of >1.18, breaking ModuleUpdate.py
|
||||
Flask-Limiter>=3.12
|
||||
bokeh>=3.6.3
|
||||
markupsafe>=3.0.2
|
||||
setproctitle>=1.3.5
|
||||
mistune>=3.1.3
|
||||
docutils>=0.22.2
|
||||
|
||||
@@ -66,7 +66,7 @@ is to ensure items necessary to complete the game will be accessible to the play
|
||||
rules allowing certain items to be placed in normally unreachable locations, provided the player has indicated they are
|
||||
comfortable exploiting certain glitches in the game.
|
||||
|
||||
## I want to add a game to the Archipelago randomizer. How do I do that?
|
||||
## I want to develop a game implementation for Archipelago. How do I do that?
|
||||
|
||||
The best way to get started is to take a look at our code on GitHub:
|
||||
[Archipelago GitHub Page](https://github.com/ArchipelagoMW/Archipelago).
|
||||
@@ -77,4 +77,5 @@ There, you will find examples of games in the `worlds` folder:
|
||||
You may also find developer documentation in the `docs` folder:
|
||||
[/docs Folder in Archipelago Code](https://github.com/ArchipelagoMW/Archipelago/tree/main/docs).
|
||||
|
||||
If you have more questions, feel free to ask in the **#ap-world-dev** channel on our Discord.
|
||||
If you have more questions regarding development of a game implementation, feel free to ask in the **#ap-world-dev**
|
||||
channel on our Discord.
|
||||
|
||||
@@ -1,49 +1,43 @@
|
||||
let updateSection = (sectionName, fakeDOM) => {
|
||||
document.getElementById(sectionName).innerHTML = fakeDOM.getElementById(sectionName).innerHTML;
|
||||
}
|
||||
|
||||
window.addEventListener('load', () => {
|
||||
// Reload tracker every 15 seconds
|
||||
const url = window.location;
|
||||
setInterval(() => {
|
||||
const ajax = new XMLHttpRequest();
|
||||
ajax.onreadystatechange = () => {
|
||||
if (ajax.readyState !== 4) { return; }
|
||||
// Reload tracker every 60 seconds (sync'd)
|
||||
const url = window.location;
|
||||
// Note: This synchronization code is adapted from code in trackerCommon.js
|
||||
const targetSecond = parseInt(document.getElementById('player-tracker').getAttribute('data-second')) + 3;
|
||||
console.log("Target second of refresh: " + targetSecond);
|
||||
|
||||
// Create a fake DOM using the returned HTML
|
||||
const domParser = new DOMParser();
|
||||
const fakeDOM = domParser.parseFromString(ajax.responseText, 'text/html');
|
||||
|
||||
// Update item tracker
|
||||
document.getElementById('inventory-table').innerHTML = fakeDOM.getElementById('inventory-table').innerHTML;
|
||||
// Update only counters in the location-table
|
||||
let counters = document.getElementsByClassName('counter');
|
||||
const fakeCounters = fakeDOM.getElementsByClassName('counter');
|
||||
for (let i = 0; i < counters.length; i++) {
|
||||
counters[i].innerHTML = fakeCounters[i].innerHTML;
|
||||
}
|
||||
let getSleepTimeSeconds = () => {
|
||||
// -40 % 60 is -40, which is absolutely wrong and should burn
|
||||
var sleepSeconds = (((targetSecond - new Date().getSeconds()) % 60) + 60) % 60;
|
||||
return sleepSeconds || 60;
|
||||
};
|
||||
ajax.open('GET', url);
|
||||
ajax.send();
|
||||
}, 15000)
|
||||
|
||||
// Collapsible advancement sections
|
||||
const categories = document.getElementsByClassName("location-category");
|
||||
for (let category of categories) {
|
||||
let hide_id = category.id.split('_')[0];
|
||||
if (hide_id === 'Total') {
|
||||
continue;
|
||||
}
|
||||
category.addEventListener('click', function() {
|
||||
// Toggle the advancement list
|
||||
document.getElementById(hide_id).classList.toggle("hide");
|
||||
// Change text of the header
|
||||
const tab_header = document.getElementById(hide_id+'_header').children[0];
|
||||
const orig_text = tab_header.innerHTML;
|
||||
let new_text;
|
||||
if (orig_text.includes("▼")) {
|
||||
new_text = orig_text.replace("▼", "▲");
|
||||
}
|
||||
else {
|
||||
new_text = orig_text.replace("▲", "▼");
|
||||
}
|
||||
tab_header.innerHTML = new_text;
|
||||
});
|
||||
}
|
||||
let updateTracker = () => {
|
||||
const ajax = new XMLHttpRequest();
|
||||
ajax.onreadystatechange = () => {
|
||||
if (ajax.readyState !== 4) { return; }
|
||||
|
||||
// Create a fake DOM using the returned HTML
|
||||
const domParser = new DOMParser();
|
||||
const fakeDOM = domParser.parseFromString(ajax.responseText, 'text/html');
|
||||
|
||||
// Update dynamic sections
|
||||
updateSection('player-info', fakeDOM);
|
||||
updateSection('section-filler', fakeDOM);
|
||||
updateSection('section-terran', fakeDOM);
|
||||
updateSection('section-zerg', fakeDOM);
|
||||
updateSection('section-protoss', fakeDOM);
|
||||
updateSection('section-nova', fakeDOM);
|
||||
updateSection('section-kerrigan', fakeDOM);
|
||||
updateSection('section-keys', fakeDOM);
|
||||
updateSection('section-locations', fakeDOM);
|
||||
};
|
||||
ajax.open('GET', url);
|
||||
ajax.send();
|
||||
updater = setTimeout(updateTracker, getSleepTimeSeconds() * 1000);
|
||||
};
|
||||
window.updater = setTimeout(updateTracker, getSleepTimeSeconds() * 1000);
|
||||
});
|
||||
|
||||
@@ -28,7 +28,6 @@
|
||||
font-weight: normal;
|
||||
font-family: LondrinaSolid-Regular, sans-serif;
|
||||
text-transform: uppercase;
|
||||
cursor: pointer; /* TODO: remove once we drop showdown.js */
|
||||
width: 100%;
|
||||
text-shadow: 1px 1px 4px #000000;
|
||||
}
|
||||
@@ -37,7 +36,6 @@
|
||||
font-size: 38px;
|
||||
font-weight: normal;
|
||||
font-family: LondrinaSolid-Light, sans-serif;
|
||||
cursor: pointer; /* TODO: remove once we drop showdown.js */
|
||||
width: 100%;
|
||||
margin-top: 20px;
|
||||
margin-bottom: 0.5rem;
|
||||
@@ -50,7 +48,6 @@
|
||||
font-family: LexendDeca-Regular, sans-serif;
|
||||
text-transform: none;
|
||||
text-align: left;
|
||||
cursor: pointer; /* TODO: remove once we drop showdown.js */
|
||||
width: 100%;
|
||||
margin-bottom: 0.5rem;
|
||||
}
|
||||
@@ -59,7 +56,6 @@
|
||||
font-family: LexendDeca-Regular, sans-serif;
|
||||
text-transform: none;
|
||||
font-size: 24px;
|
||||
cursor: pointer; /* TODO: remove once we drop showdown.js */
|
||||
margin-bottom: 24px;
|
||||
}
|
||||
|
||||
@@ -67,14 +63,12 @@
|
||||
font-family: LexendDeca-Regular, sans-serif;
|
||||
text-transform: none;
|
||||
font-size: 22px;
|
||||
cursor: pointer; /* TODO: remove once we drop showdown.js */
|
||||
}
|
||||
|
||||
.markdown h6, .markdown details summary.h6{
|
||||
font-family: LexendDeca-Regular, sans-serif;
|
||||
text-transform: none;
|
||||
font-size: 20px;
|
||||
cursor: pointer; /* TODO: remove once we drop showdown.js */
|
||||
}
|
||||
|
||||
.markdown h4, .markdown h5, .markdown h6{
|
||||
|
||||
@@ -1,160 +1,279 @@
|
||||
#player-tracker-wrapper{
|
||||
margin: 0;
|
||||
*{
|
||||
margin: 0;
|
||||
font-family: "JuraBook", monospace;
|
||||
}
|
||||
body{
|
||||
--icon-size: 36px;
|
||||
--item-class-padding: 4px;
|
||||
}
|
||||
a{
|
||||
color: #1ae;
|
||||
}
|
||||
|
||||
#tracker-table td {
|
||||
vertical-align: top;
|
||||
/* Section colours */
|
||||
#player-info{
|
||||
background-color: #37a;
|
||||
}
|
||||
.player-tracker{
|
||||
max-width: 100%;
|
||||
}
|
||||
.tracker-section{
|
||||
background-color: grey;
|
||||
}
|
||||
#terran-items{
|
||||
background-color: #3a7;
|
||||
}
|
||||
#zerg-items{
|
||||
background-color: #d94;
|
||||
}
|
||||
#protoss-items{
|
||||
background-color: #37a;
|
||||
}
|
||||
#nova-items{
|
||||
background-color: #777;
|
||||
}
|
||||
#kerrigan-items{
|
||||
background-color: #a37;
|
||||
}
|
||||
#keys{
|
||||
background-color: #aa2;
|
||||
}
|
||||
|
||||
.inventory-table-area{
|
||||
border: 2px solid #000000;
|
||||
border-radius: 4px;
|
||||
padding: 3px 10px 3px 10px;
|
||||
/* Sections */
|
||||
.section-body{
|
||||
display: flex;
|
||||
flex-flow: row wrap;
|
||||
justify-content: flex-start;
|
||||
align-items: flex-start;
|
||||
padding-bottom: 3px;
|
||||
}
|
||||
.section-body-2{
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
}
|
||||
.tracker-section:has(input.collapse-section[type=checkbox]:checked) .section-body,
|
||||
.tracker-section:has(input.collapse-section[type=checkbox]:checked) .section-body-2{
|
||||
display: none;
|
||||
}
|
||||
.section-title{
|
||||
position: relative;
|
||||
border-bottom: 3px solid black;
|
||||
/* Prevent text selection */
|
||||
user-select: none;
|
||||
-webkit-user-select: none;
|
||||
-ms-user-select: none;
|
||||
}
|
||||
input[type="checkbox"]{
|
||||
position: absolute;
|
||||
cursor: pointer;
|
||||
opacity: 0;
|
||||
z-index: 1;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
}
|
||||
.section-title:hover h2{
|
||||
text-shadow: 0 0 4px #ddd;
|
||||
}
|
||||
.f {
|
||||
display: flex;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.inventory-table-area:has(.inventory-table-terran) {
|
||||
width: 690px;
|
||||
background-color: #525494;
|
||||
/* Acquire item filters */
|
||||
.tracker-section img{
|
||||
height: 100%;
|
||||
width: var(--icon-size);
|
||||
height: var(--icon-size);
|
||||
background-color: black;
|
||||
}
|
||||
.unacquired, .lvl-0 .f{
|
||||
filter: grayscale(100%) contrast(80%) brightness(42%) blur(0.5px);
|
||||
}
|
||||
.spacer{
|
||||
width: var(--icon-size);
|
||||
height: var(--icon-size);
|
||||
}
|
||||
|
||||
.inventory-table-area:has(.inventory-table-zerg) {
|
||||
width: 360px;
|
||||
background-color: #9d60d2;
|
||||
/* Item groups */
|
||||
.item-class{
|
||||
display: flex;
|
||||
flex-flow: column;
|
||||
justify-content: center;
|
||||
padding: var(--item-class-padding);
|
||||
}
|
||||
.item-class-header{
|
||||
display: flex;
|
||||
flex-flow: row;
|
||||
}
|
||||
.item-class-upgrades{
|
||||
/* Note: {display: flex; flex-flow: column wrap} */
|
||||
/* just breaks on Firefox (width does not scale to content) */
|
||||
display: grid;
|
||||
grid-template-rows: repeat(4, auto);
|
||||
grid-auto-flow: column;
|
||||
}
|
||||
|
||||
.inventory-table-area:has(.inventory-table-protoss) {
|
||||
width: 400px;
|
||||
background-color: #d2b260;
|
||||
/* Subsections */
|
||||
.section-toc{
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
}
|
||||
.toc-box{
|
||||
position: relative;
|
||||
padding-left: 15px;
|
||||
padding-right: 15px;
|
||||
}
|
||||
.toc-box:hover{
|
||||
text-shadow: 0 0 7px white;
|
||||
}
|
||||
.ss-header{
|
||||
position: relative;
|
||||
text-align: center;
|
||||
writing-mode: sideways-lr;
|
||||
user-select: none;
|
||||
padding-top: 5px;
|
||||
font-size: 115%;
|
||||
}
|
||||
.tracker-section:has(input.ss-1-toggle:checked) .ss-1{
|
||||
display: none;
|
||||
}
|
||||
.tracker-section:has(input.ss-2-toggle:checked) .ss-2{
|
||||
display: none;
|
||||
}
|
||||
.tracker-section:has(input.ss-3-toggle:checked) .ss-3{
|
||||
display: none;
|
||||
}
|
||||
.tracker-section:has(input.ss-4-toggle:checked) .ss-4{
|
||||
display: none;
|
||||
}
|
||||
.tracker-section:has(input.ss-5-toggle:checked) .ss-5{
|
||||
display: none;
|
||||
}
|
||||
.tracker-section:has(input.ss-6-toggle:checked) .ss-6{
|
||||
display: none;
|
||||
}
|
||||
.tracker-section:has(input.ss-7-toggle:checked) .ss-7{
|
||||
display: none;
|
||||
}
|
||||
.tracker-section:has(input.ss-1-toggle:hover) .ss-1{
|
||||
background-color: #fff5;
|
||||
box-shadow: 0 0 1px 1px white;
|
||||
}
|
||||
.tracker-section:has(input.ss-2-toggle:hover) .ss-2{
|
||||
background-color: #fff5;
|
||||
box-shadow: 0 0 1px 1px white;
|
||||
}
|
||||
.tracker-section:has(input.ss-3-toggle:hover) .ss-3{
|
||||
background-color: #fff5;
|
||||
box-shadow: 0 0 1px 1px white;
|
||||
}
|
||||
.tracker-section:has(input.ss-4-toggle:hover) .ss-4{
|
||||
background-color: #fff5;
|
||||
box-shadow: 0 0 1px 1px white;
|
||||
}
|
||||
.tracker-section:has(input.ss-5-toggle:hover) .ss-5{
|
||||
background-color: #fff5;
|
||||
box-shadow: 0 0 1px 1px white;
|
||||
}
|
||||
.tracker-section:has(input.ss-6-toggle:hover) .ss-6{
|
||||
background-color: #fff5;
|
||||
box-shadow: 0 0 1px 1px white;
|
||||
}
|
||||
.tracker-section:has(input.ss-7-toggle:hover) .ss-7{
|
||||
background-color: #fff5;
|
||||
box-shadow: 0 0 1px 1px white;
|
||||
}
|
||||
|
||||
#tracker-table .inventory-table td{
|
||||
width: 40px;
|
||||
height: 40px;
|
||||
text-align: center;
|
||||
vertical-align: middle;
|
||||
/* Progressive items */
|
||||
.progressive{
|
||||
max-height: var(--icon-size);
|
||||
display: contents;
|
||||
}
|
||||
|
||||
.inventory-table td.title{
|
||||
padding-top: 10px;
|
||||
height: 20px;
|
||||
font-family: "JuraBook", monospace;
|
||||
font-size: 16px;
|
||||
font-weight: bold;
|
||||
.lvl-0 > :nth-child(2),
|
||||
.lvl-0 > :nth-child(3),
|
||||
.lvl-0 > :nth-child(4),
|
||||
.lvl-0 > :nth-child(5){
|
||||
display: none;
|
||||
}
|
||||
.lvl-1 > :nth-child(2),
|
||||
.lvl-1 > :nth-child(3),
|
||||
.lvl-1 > :nth-child(4),
|
||||
.lvl-1 > :nth-child(5){
|
||||
display: none;
|
||||
}
|
||||
.lvl-2 > :nth-child(1),
|
||||
.lvl-2 > :nth-child(3),
|
||||
.lvl-2 > :nth-child(4),
|
||||
.lvl-2 > :nth-child(5){
|
||||
display: none;
|
||||
}
|
||||
.lvl-3 > :nth-child(1),
|
||||
.lvl-3 > :nth-child(2),
|
||||
.lvl-3 > :nth-child(4),
|
||||
.lvl-3 > :nth-child(5){
|
||||
display: none;
|
||||
}
|
||||
.lvl-4 > :nth-child(1),
|
||||
.lvl-4 > :nth-child(2),
|
||||
.lvl-4 > :nth-child(3),
|
||||
.lvl-4 > :nth-child(5){
|
||||
display: none;
|
||||
}
|
||||
.lvl-5 > :nth-child(1),
|
||||
.lvl-5 > :nth-child(2),
|
||||
.lvl-5 > :nth-child(3),
|
||||
.lvl-5 > :nth-child(4){
|
||||
display: none;
|
||||
}
|
||||
|
||||
.inventory-table img{
|
||||
height: 100%;
|
||||
max-width: 40px;
|
||||
max-height: 40px;
|
||||
border: 1px solid #000000;
|
||||
filter: grayscale(100%) contrast(75%) brightness(20%);
|
||||
background-color: black;
|
||||
/* Filler item counters */
|
||||
.item-counter{
|
||||
display: table;
|
||||
text-align: center;
|
||||
padding: var(--item-class-padding);
|
||||
}
|
||||
.item-count{
|
||||
display: table-cell;
|
||||
vertical-align: middle;
|
||||
padding-left: 3px;
|
||||
padding-right: 15px;
|
||||
}
|
||||
|
||||
.inventory-table img.acquired{
|
||||
filter: none;
|
||||
background-color: black;
|
||||
/* Hidden items */
|
||||
.hidden-class:not(:has(img.acquired)){
|
||||
display: none;
|
||||
}
|
||||
.hidden-item:not(.acquired){
|
||||
display:none;
|
||||
}
|
||||
|
||||
.inventory-table .tint-terran img.acquired {
|
||||
filter: sepia(100%) saturate(300%) brightness(130%) hue-rotate(120deg)
|
||||
/* Keys */
|
||||
#keys ol, #keys ul{
|
||||
columns: 3;
|
||||
-webkit-columns: 3;
|
||||
-moz-columns: 3;
|
||||
}
|
||||
#keys li{
|
||||
padding-right: 15pt;
|
||||
}
|
||||
|
||||
.inventory-table .tint-protoss img.acquired {
|
||||
filter: sepia(100%) saturate(1000%) brightness(110%) hue-rotate(180deg)
|
||||
/* Locations */
|
||||
#section-locations{
|
||||
padding-left: 5px;
|
||||
}
|
||||
@media only screen and (min-width: 120ch){
|
||||
#section-locations ul{
|
||||
columns: 2;
|
||||
-webkit-columns: 2;
|
||||
-moz-columns: 2;
|
||||
}
|
||||
}
|
||||
#locations li.checked{
|
||||
list-style-type: "✔ ";
|
||||
}
|
||||
|
||||
.inventory-table .tint-level-1 img.acquired {
|
||||
filter: sepia(100%) saturate(1000%) brightness(110%) hue-rotate(60deg)
|
||||
}
|
||||
|
||||
.inventory-table .tint-level-2 img.acquired {
|
||||
filter: sepia(100%) saturate(1000%) brightness(110%) hue-rotate(60deg) hue-rotate(120deg)
|
||||
}
|
||||
|
||||
.inventory-table .tint-level-3 img.acquired {
|
||||
filter: sepia(100%) saturate(1000%) brightness(110%) hue-rotate(60deg) hue-rotate(240deg)
|
||||
}
|
||||
|
||||
.inventory-table div.counted-item {
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.inventory-table div.item-count {
|
||||
width: 160px;
|
||||
text-align: left;
|
||||
color: black;
|
||||
font-family: "JuraBook", monospace;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
#location-table{
|
||||
border: 2px solid #000000;
|
||||
border-radius: 4px;
|
||||
background-color: #87b678;
|
||||
padding: 10px 3px 3px;
|
||||
font-family: "JuraBook", monospace;
|
||||
font-size: 16px;
|
||||
font-weight: bold;
|
||||
cursor: default;
|
||||
}
|
||||
|
||||
#location-table table{
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
#location-table th{
|
||||
vertical-align: middle;
|
||||
text-align: left;
|
||||
padding-right: 10px;
|
||||
}
|
||||
|
||||
#location-table td{
|
||||
padding-top: 2px;
|
||||
padding-bottom: 2px;
|
||||
line-height: 20px;
|
||||
}
|
||||
|
||||
#location-table td.counter {
|
||||
text-align: right;
|
||||
font-size: 14px;
|
||||
}
|
||||
|
||||
#location-table td.toggle-arrow {
|
||||
text-align: right;
|
||||
}
|
||||
|
||||
#location-table tr#Total-header {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
#location-table img{
|
||||
height: 100%;
|
||||
max-width: 30px;
|
||||
max-height: 30px;
|
||||
}
|
||||
|
||||
#location-table tbody.locations {
|
||||
font-size: 16px;
|
||||
}
|
||||
|
||||
#location-table td.location-name {
|
||||
padding-left: 16px;
|
||||
}
|
||||
|
||||
#location-table td:has(.location-column) {
|
||||
vertical-align: top;
|
||||
}
|
||||
|
||||
#location-table .location-column {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
#location-table .location-column .spacer {
|
||||
min-height: 24px;
|
||||
}
|
||||
|
||||
.hide {
|
||||
display: none;
|
||||
/* Allowing scrolling down a little further */
|
||||
.bottom-padding{
|
||||
min-height: 33vh;
|
||||
}
|
||||
3965
WebHostLib/static/styles/sc2TrackerAtlas.css
Normal file
File diff suppressed because it is too large
@@ -72,3 +72,13 @@ code{
|
||||
padding-right: 0.25rem;
|
||||
color: #000000;
|
||||
}
|
||||
|
||||
code.grassy {
|
||||
background-color: #b5e9a4;
|
||||
border: 1px solid #2a6c2f;
|
||||
white-space: preserve;
|
||||
text-align: left;
|
||||
display: block;
|
||||
font-size: 14px;
|
||||
line-height: 20px;
|
||||
}
|
||||
|
||||
@@ -13,3 +13,7 @@
|
||||
min-height: 360px;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
h2, h4 {
|
||||
color: #ffffff;
|
||||
}
|
||||
|
||||
@@ -98,7 +98,7 @@
|
||||
<td>
|
||||
{% if hint.finding_player == player %}
|
||||
<b>{{ player_names_with_alias[(team, hint.finding_player)] }}</b>
|
||||
{% elif get_slot_info(team, hint.finding_player).type == 2 %}
|
||||
{% elif get_slot_info(hint.finding_player).type == 2 %}
|
||||
<i>{{ player_names_with_alias[(team, hint.finding_player)] }}</i>
|
||||
{% else %}
|
||||
<a href="{{ url_for("get_player_tracker", tracker=room.tracker, tracked_team=team, tracked_player=hint.finding_player) }}">
|
||||
@@ -109,7 +109,7 @@
|
||||
<td>
|
||||
{% if hint.receiving_player == player %}
|
||||
<b>{{ player_names_with_alias[(team, hint.receiving_player)] }}</b>
|
||||
{% elif get_slot_info(team, hint.receiving_player).type == 2 %}
|
||||
{% elif get_slot_info(hint.receiving_player).type == 2 %}
|
||||
<i>{{ player_names_with_alias[(team, hint.receiving_player)] }}</i>
|
||||
{% else %}
|
||||
<a href="{{ url_for("get_player_tracker", tracker=room.tracker, tracked_team=team, tracked_player=hint.receiving_player) }}">
|
||||
|
||||
@@ -58,8 +58,7 @@
|
||||
Open Log File...
|
||||
</a>
|
||||
</div>
|
||||
{% set log = get_log() -%}
|
||||
{%- set log_len = log | length - 1 if log.endswith("…") else log | length -%}
|
||||
{% set log, log_len = get_log() -%}
|
||||
<div id="logger" style="white-space: pre">{{ log }}</div>
|
||||
<script>
|
||||
let url = '{{ url_for('display_log', room = room.id) }}';
|
||||
|
||||
@@ -45,15 +45,15 @@
|
||||
{%- set current_sphere = loop.index %}
|
||||
{%- for player, sphere_location_ids in sphere.items() %}
|
||||
{%- set checked_locations = tracker_data.get_player_checked_locations(team, player) %}
|
||||
{%- set finder_game = tracker_data.get_player_game(team, player) %}
|
||||
{%- set player_location_data = tracker_data.get_player_locations(team, player) %}
|
||||
{%- set finder_game = tracker_data.get_player_game(player) %}
|
||||
{%- set player_location_data = tracker_data.get_player_locations(player) %}
|
||||
{%- for location_id in sphere_location_ids.intersection(checked_locations) %}
|
||||
<tr>
|
||||
{%- set item_id, receiver, item_flags = player_location_data[location_id] %}
|
||||
{%- set receiver_game = tracker_data.get_player_game(team, receiver) %}
|
||||
{%- set receiver_game = tracker_data.get_player_game(receiver) %}
|
||||
<td>{{ current_sphere }}</td>
|
||||
<td>{{ tracker_data.get_player_name(team, player) }}</td>
|
||||
<td>{{ tracker_data.get_player_name(team, receiver) }}</td>
|
||||
<td>{{ tracker_data.get_player_name(player) }}</td>
|
||||
<td>{{ tracker_data.get_player_name(receiver) }}</td>
|
||||
<td>{{ tracker_data.item_id_to_name[receiver_game][item_id] }}</td>
|
||||
<td>{{ tracker_data.location_id_to_name[finder_game][location_id] }}</td>
|
||||
<td>{{ finder_game }}</td>
|
||||
|
||||
@@ -22,14 +22,14 @@
|
||||
-%}
|
||||
<tr>
|
||||
<td>
|
||||
{% if get_slot_info(team, hint.finding_player).type == 2 %}
|
||||
{% if get_slot_info(hint.finding_player).type == 2 %}
|
||||
<i>{{ player_names_with_alias[(team, hint.finding_player)] }}</i>
|
||||
{% else %}
|
||||
{{ player_names_with_alias[(team, hint.finding_player)] }}
|
||||
{% endif %}
|
||||
</td>
|
||||
<td>
|
||||
{% if get_slot_info(team, hint.receiving_player).type == 2 %}
|
||||
{% if get_slot_info(hint.receiving_player).type == 2 %}
|
||||
<i>{{ player_names_with_alias[(team, hint.receiving_player)] }}</i>
|
||||
{% else %}
|
||||
{{ player_names_with_alias[(team, hint.receiving_player)] }}
|
||||
|
||||
@@ -134,6 +134,7 @@
|
||||
|
||||
{% macro OptionList(option_name, option) %}
|
||||
{{ OptionTitle(option_name, option) }}
|
||||
<input type="hidden" id="{{ option_name }}-{{ key }}-hidden" name="{{ option_name }}" value="_ensure-empty-list"/>
|
||||
<div class="option-container">
|
||||
{% for key in (option.valid_keys if option.valid_keys is ordered else option.valid_keys|sort) %}
|
||||
<div class="option-entry">
|
||||
@@ -146,6 +147,7 @@
|
||||
|
||||
{% macro LocationSet(option_name, option) %}
|
||||
{{ OptionTitle(option_name, option) }}
|
||||
<input type="hidden" id="{{ option_name }}-{{ key }}-hidden" name="{{ option_name }}" value="_ensure-empty-list"/>
|
||||
<div class="option-container">
|
||||
{% for group_name in world.location_name_groups.keys()|sort %}
|
||||
{% if group_name != "Everywhere" %}
|
||||
@@ -169,6 +171,7 @@
|
||||
|
||||
{% macro ItemSet(option_name, option) %}
|
||||
{{ OptionTitle(option_name, option) }}
|
||||
<input type="hidden" id="{{ option_name }}-{{ key }}-hidden" name="{{ option_name }}" value="_ensure-empty-list"/>
|
||||
<div class="option-container">
|
||||
{% for group_name in world.item_name_groups.keys()|sort %}
|
||||
{% if group_name != "Everything" %}
|
||||
@@ -192,6 +195,7 @@
|
||||
|
||||
{% macro OptionSet(option_name, option) %}
|
||||
{{ OptionTitle(option_name, option) }}
|
||||
<input type="hidden" id="{{ option_name }}-{{ key }}-hidden" name="{{ option_name }}" value="_ensure-empty-list"/>
|
||||
<div class="option-container">
|
||||
{% for key in (option.valid_keys if option.valid_keys is ordered else option.valid_keys|sort) %}
|
||||
<div class="option-entry">
|
||||
|
||||
@@ -4,16 +4,20 @@
|
||||
|
||||
{% block head %}
|
||||
<title>Generation failed, please retry.</title>
|
||||
<link rel="stylesheet" type="text/css" href="{{ url_for('static', filename="styles/waitSeed.css") }}"/>
|
||||
<link rel="stylesheet" type="text/css" href="{{ url_for('static', filename='styles/waitSeed.css') }}"/>
|
||||
{% endblock %}
|
||||
|
||||
{% block body %}
|
||||
{% include 'header/oceanIslandHeader.html' %}
|
||||
<div id="wait-seed-wrapper" class="grass-island">
|
||||
<div id="wait-seed">
|
||||
<h1>Generation failed</h1>
|
||||
<h2>please retry</h2>
|
||||
{{ seed_error }}
|
||||
<h1>Generation Failed</h1>
|
||||
<h2>Please try again!</h2>
|
||||
<p>{{ seed_error }}</p>
|
||||
<h4>More details:</h4>
|
||||
<p>
|
||||
<code class="grassy">{{ details }}</code>
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
{% endblock %}
|
||||
|
||||
@@ -11,32 +11,32 @@
|
||||
<h1>Site Map</h1>
|
||||
<h2>Base Pages</h2>
|
||||
<ul>
|
||||
<li><a href="/discord">Discord Link</a></li>
|
||||
<li><a href="/faq/en">F.A.Q. Page</a></li>
|
||||
<li><a href="/favicon.ico">Favicon</a></li>
|
||||
<li><a href="/generate">Generate Game Page</a></li>
|
||||
<li><a href="/">Homepage</a></li>
|
||||
<li><a href="/uploads">Host Game Page</a></li>
|
||||
<li><a href="/datapackage">Raw Data Package</a></li>
|
||||
<li><a href="{{ url_for('check')}}">Settings Validator</a></li>
|
||||
<li><a href="/sitemap">Site Map</a></li>
|
||||
<li><a href="/start-playing">Start Playing</a></li>
|
||||
<li><a href="/games">Supported Games Page</a></li>
|
||||
<li><a href="/tutorial">Tutorials Page</a></li>
|
||||
<li><a href="/user-content">User Content</a></li>
|
||||
<li><a href="{{url_for('stats')}}">Game Statistics</a></li>
|
||||
<li><a href="/glossary/en">Glossary</a></li>
|
||||
<li><a href="{{url_for("show_session")}}">Session / Login</a></li>
|
||||
<li><a href="{{ url_for('discord') }}">Discord Link</a></li>
|
||||
<li><a href="{{ url_for('faq', lang='en') }}">F.A.Q. Page</a></li>
|
||||
<li><a href="{{ url_for('favicon') }}">Favicon</a></li>
|
||||
<li><a href="{{ url_for('generate') }}">Generate Game Page</a></li>
|
||||
<li><a href="{{ url_for('landing') }}">Homepage</a></li>
|
||||
<li><a href="{{ url_for('uploads') }}">Host Game Page</a></li>
|
||||
<li><a href="{{ url_for('get_datapackage') }}">Raw Data Package</a></li>
|
||||
<li><a href="{{ url_for('check') }}">Settings Validator</a></li>
|
||||
<li><a href="{{ url_for('get_sitemap') }}">Site Map</a></li>
|
||||
<li><a href="{{ url_for('start_playing') }}">Start Playing</a></li>
|
||||
<li><a href="{{ url_for('games') }}">Supported Games Page</a></li>
|
||||
<li><a href="{{ url_for('tutorial_landing') }}">Tutorials Page</a></li>
|
||||
<li><a href="{{ url_for('user_content') }}">User Content</a></li>
|
||||
<li><a href="{{ url_for('stats') }}">Game Statistics</a></li>
|
||||
<li><a href="{{ url_for('glossary', lang='en') }}">Glossary</a></li>
|
||||
<li><a href="{{ url_for('show_session') }}">Session / Login</a></li>
|
||||
</ul>
|
||||
|
||||
<h2>Tutorials</h2>
|
||||
<ul>
|
||||
<li><a href="/tutorial/Archipelago/setup/en">Multiworld Setup Tutorial</a></li>
|
||||
<li><a href="/tutorial/Archipelago/mac/en">Setup Guide for Mac</a></li>
|
||||
<li><a href="/tutorial/Archipelago/commands/en">Server and Client Commands</a></li>
|
||||
<li><a href="/tutorial/Archipelago/advanced_settings/en">Advanced YAML Guide</a></li>
|
||||
<li><a href="/tutorial/Archipelago/triggers/en">Triggers Guide</a></li>
|
||||
<li><a href="/tutorial/Archipelago/plando/en">Plando Guide</a></li>
|
||||
<li><a href="{{ url_for('tutorial', game='Archipelago', file='setup_en') }}">Multiworld Setup Tutorial</a></li>
|
||||
<li><a href="{{ url_for('tutorial', game='Archipelago', file='mac_en') }}">Setup Guide for Mac</a></li>
|
||||
<li><a href="{{ url_for('tutorial', game='Archipelago', file='commands_en') }}">Server and Client Commands</a></li>
|
||||
<li><a href="{{ url_for('tutorial', game='Archipelago', file='advanced_settings_en') }}">Advanced YAML Guide</a></li>
|
||||
<li><a href="{{ url_for('tutorial', game='Archipelago', file='triggers_en') }}">Triggers Guide</a></li>
|
||||
<li><a href="{{ url_for('tutorial', game='Archipelago', file='plando_en') }}">Plando Guide</a></li>
|
||||
</ul>
|
||||
|
||||
<h2>Game Info Pages</h2>
|
||||
|
||||
@@ -31,6 +31,9 @@
|
||||
{% include 'header/oceanHeader.html' %}
|
||||
<div id="games" class="markdown">
|
||||
<h1>Currently Supported Games</h1>
|
||||
<p>Below are the games that are currently included with the Archipelago software. To play a game that is not on
|
||||
this page, please refer to the <a href="/tutorial/Archipelago/setup/en#playing-with-custom-worlds">playing with
|
||||
custom worlds</a> section of the setup guide.</p>
|
||||
<div class="js-only">
|
||||
<label for="game-search">Search for your game below!</label><br />
|
||||
<div class="page-controls">
|
||||
|
||||
File diff suppressed because it is too large
@@ -139,6 +139,7 @@
|
||||
{% endmacro %}
|
||||
|
||||
{% macro OptionList(option_name, option) %}
|
||||
<input type="hidden" id="{{ option_name }}-{{ key }}-hidden" name="{{ option_name }}" value="_ensure-empty-list"/>
|
||||
<div class="list-container">
|
||||
{% for key in (option.valid_keys if option.valid_keys is ordered else option.valid_keys|sort) %}
|
||||
<div class="list-entry">
|
||||
@@ -158,6 +159,7 @@
|
||||
{% endmacro %}
|
||||
|
||||
{% macro LocationSet(option_name, option, world) %}
|
||||
<input type="hidden" id="{{ option_name }}-{{ key }}-hidden" name="{{ option_name }}" value="_ensure-empty-list"/>
|
||||
<div class="set-container">
|
||||
{% for group_name in world.location_name_groups.keys()|sort %}
|
||||
{% if group_name != "Everywhere" %}
|
||||
@@ -180,6 +182,7 @@
|
||||
{% endmacro %}
|
||||
|
||||
{% macro ItemSet(option_name, option, world) %}
|
||||
<input type="hidden" id="{{ option_name }}-{{ key }}-hidden" name="{{ option_name }}" value="_ensure-empty-list"/>
|
||||
<div class="set-container">
|
||||
{% for group_name in world.item_name_groups.keys()|sort %}
|
||||
{% if group_name != "Everything" %}
|
||||
@@ -202,6 +205,7 @@
|
||||
{% endmacro %}
|
||||
|
||||
{% macro OptionSet(option_name, option) %}
|
||||
<input type="hidden" id="{{ option_name }}-{{ key }}-hidden" name="{{ option_name }}" value="_ensure-empty-list"/>
|
||||
<div class="set-container">
|
||||
{% for key in (option.valid_keys if option.valid_keys is ordered else option.valid_keys|sort) %}
|
||||
<div class="set-entry">
|
||||
|
||||
File diff suppressed because it is too large
@@ -20,6 +20,8 @@ from worlds.tloz.Items import item_game_ids
|
||||
from worlds.tloz.Locations import location_ids
|
||||
from worlds.tloz import Items, Locations, Rom
|
||||
|
||||
from settings import get_settings
|
||||
|
||||
SYSTEM_MESSAGE_ID = 0
|
||||
|
||||
CONNECTION_TIMING_OUT_STATUS = "Connection timing out. Please restart your emulator, then restart connector_tloz.lua"
|
||||
@@ -341,13 +343,12 @@ if __name__ == '__main__':
|
||||
# Text Mode to use !hint and such with games that have no text entry
|
||||
Utils.init_logging("ZeldaClient")
|
||||
|
||||
options = Utils.get_options()
|
||||
DISPLAY_MSGS = options["tloz_options"]["display_msgs"]
|
||||
DISPLAY_MSGS = get_settings()["tloz_options"]["display_msgs"]
|
||||
|
||||
|
||||
async def run_game(romfile: str) -> None:
|
||||
auto_start = typing.cast(typing.Union[bool, str],
|
||||
Utils.get_options()["tloz_options"].get("rom_start", True))
|
||||
get_settings()["tloz_options"].get("rom_start", True))
|
||||
if auto_start is True:
|
||||
import webbrowser
|
||||
webbrowser.open(romfile)
|
||||
|
||||
@@ -220,6 +220,8 @@
|
||||
<MessageBoxLabel>:
|
||||
theme_text_color: "Custom"
|
||||
text_color: 1, 1, 1, 1
|
||||
<MessageBox>:
|
||||
height: self.content.texture_size[1] + 80
|
||||
<ScrollBox>:
|
||||
layout: layout
|
||||
bar_width: "12dp"
|
||||
@@ -233,8 +235,3 @@
|
||||
spacing: 10
|
||||
size_hint_y: None
|
||||
height: self.minimum_height
|
||||
<MessageBoxLabel>:
|
||||
valign: "middle"
|
||||
halign: "center"
|
||||
text_size: self.width, None
|
||||
height: self.texture_size[1]
|
||||
|
||||
@@ -33,6 +33,10 @@ description: {{ yaml_dump("Default %s Template" % game) }}
|
||||
game: {{ yaml_dump(game) }}
|
||||
requires:
|
||||
version: {{ __version__ }} # Version of Archipelago required for this yaml to work as expected.
|
||||
{%- if world_version != "0.0.0" %}
|
||||
game:
|
||||
{{ yaml_dump(game) }}: {{ world_version }} # Version of the world required for this yaml to work as expected.
|
||||
{%- endif %}
|
||||
|
||||
{%- macro range_option(option) %}
|
||||
# You can define additional values between the minimum and maximum values.
|
||||
|
||||
@@ -1,7 +0,0 @@
|
||||
author: Nintendo
|
||||
data: null
|
||||
game: A Link to the Past
|
||||
min_format_version: 1
|
||||
name: Link
|
||||
format_version: 1
|
||||
sprite_version: 1
|
||||
2
data/sprites/remote/.gitignore
vendored
@@ -1,2 +0,0 @@
|
||||
*
|
||||
!.gitignore
|
||||
@@ -21,9 +21,6 @@
|
||||
# Aquaria
|
||||
/worlds/aquaria/ @tioui
|
||||
|
||||
# ArchipIDLE
|
||||
/worlds/archipidle/ @LegendaryLinux
|
||||
|
||||
# Blasphemous
|
||||
/worlds/blasphemous/ @TRPG0
|
||||
|
||||
@@ -42,9 +39,15 @@
|
||||
# Celeste 64
|
||||
/worlds/celeste64/ @PoryGone
|
||||
|
||||
# Celeste (Open World)
|
||||
/worlds/celeste_open_world/ @PoryGone
|
||||
|
||||
# ChecksFinder
|
||||
/worlds/checksfinder/ @SunCatMC
|
||||
|
||||
# Choo-Choo Charles
|
||||
/worlds/cccharles/ @Yaranorgoth
|
||||
|
||||
# Civilization VI
|
||||
/worlds/civ6/ @hesto2
|
||||
|
||||
@@ -69,6 +72,9 @@
|
||||
# Faxanadu
|
||||
/worlds/faxanadu/ @Daivuk
|
||||
|
||||
# Final Fantasy (1)
|
||||
/worlds/ff1/ @Rosalie-A
|
||||
|
||||
# Final Fantasy Mystic Quest
|
||||
/worlds/ffmq/ @Alchav @wildham0
|
||||
|
||||
@@ -238,9 +244,6 @@
|
||||
# compatibility, these worlds may be deleted. If you are interested in stepping up as maintainer for
|
||||
# any of these worlds, please review `/docs/world maintainer.md` documentation.
|
||||
|
||||
# Final Fantasy (1)
|
||||
# /worlds/ff1/
|
||||
|
||||
# Ocarina of Time
|
||||
# /worlds/oot/
|
||||
|
||||
|
||||
@@ -62,6 +62,24 @@ if possible.
|
||||
* If your client appears in the Archipelago Launcher, you may define an icon for it that differentiates it from
|
||||
other clients. The icon size is 48x48 pixels, but smaller or larger images will scale to that size.
|
||||
|
||||
### Launcher Integration
|
||||
|
||||
If you have a Python client or want to utilize the integration features of the Archipelago Launcher (e.g. slot links in
|
||||
WebHost), you can define a Component to be a part of the Launcher. `LauncherComponents.components` can be appended to
|
||||
with additional Components in order to automatically add them to the Launcher. Most Components only need a
|
||||
`display_name` and `func`, but `supports_uri` and `game_name` can be defined to support launching from WebHost links,
|
||||
`icon` and `description` can be used to customize display in the Launcher UI, and `file_identifier` can be used to
|
||||
launch by file.
|
||||
|
||||
Additionally, if you use `func` you have access to LauncherComponent.launch or launch_subprocess to run your
|
||||
function as a subprocess that can run side by side with other clients.
|
||||
```py
|
||||
def my_func(*args: str):
|
||||
from .client import run_client
|
||||
LauncherComponent.launch(run_client, name="My Client", args=args)
|
||||
```
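
For reference, here is a hedged sketch of registering such a Component; the keyword fields are the ones named above, while the module path and client function are placeholders.

```python
# Sketch only: the import follows the LauncherComponents module named above;
# "worlds.my_game.client" and run_client are hypothetical.
from worlds.LauncherComponents import Component, components, launch_subprocess


def launch_my_client(*args: str) -> None:
    from worlds.my_game.client import run_client  # hypothetical client entry point
    launch_subprocess(run_client, name="My Game Client", args=args)


components.append(Component(
    "My Game Client",      # display_name shown in the Launcher
    func=launch_my_client,
    game_name="My Game",   # lets WebHost slot links resolve to this client
    supports_uri=True,
))
```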
|
||||
|
||||
|
||||
## World
|
||||
|
||||
The world is your game integration for the Archipelago generator, webhost, and multiworld server. It contains all the
|
||||
|
||||
@@ -1,26 +1,83 @@
|
||||
# apworld Specification
|
||||
# APWorld Specification
|
||||
|
||||
Archipelago depends on worlds to provide game-specific details like items, locations and output generation.
|
||||
Those are located in the `worlds/` folder (source) or `<install dir>/lib/worlds/` (when installed).
|
||||
These are called "APWorlds".
|
||||
They are located in the `worlds/` folder (source) or `<install dir>/lib/worlds/` (when installed).
|
||||
See [world api.md](world%20api.md) for details.
|
||||
APWorlds can either be a folder, or they can be packaged as an .apworld file.
|
||||
|
||||
apworld provides a way to package and ship a world that is not part of the main distribution by placing a `*.apworld`
|
||||
file into the worlds folder.
|
||||
## .apworld File Format
|
||||
|
||||
**Warning:** apworlds have to be all lower case, otherwise they raise a bogus Exception when trying to import in frozen python 3.10+!
|
||||
The `.apworld` file format provides a way to package and ship an APWorld that is not part of the main distribution
|
||||
by placing a `*.apworld` file into the worlds folder.
|
||||
|
||||
|
||||
## File Format
|
||||
|
||||
apworld files are zip archives, all lower case, with the file ending `.apworld`.
|
||||
`.apworld` files are zip archives, all lower case, with the file ending `.apworld`.
|
||||
The zip has to contain a folder with the same name as the zip, case-sensitive, that contains what would normally be in
|
||||
the world's folder in `worlds/`. I.e. `worlds/ror2.apworld` containing `ror2/__init__.py`.
|
||||
|
||||
**Warning:** `.apworld` files have to be all lower case,
|
||||
otherwise they raise a bogus Exception when trying to import in frozen python 3.10+!
|
||||
|
||||
## Metadata
|
||||
|
||||
No metadata is specified yet.
|
||||
Metadata about the APWorld is defined in an `archipelago.json` file.
|
||||
|
||||
If the APWorld is a folder, the only required field is "game":
|
||||
```json
|
||||
{
|
||||
"game": "Game Name"
|
||||
}
|
||||
```
|
||||
|
||||
There are also the following optional fields:
|
||||
* `minimum_ap_version` and `maximum_ap_version` - which, if present, will each be compared against the current
|
||||
Archipelago version in order to filter incompatible files from being loaded.
|
||||
* `world_version` - an arbitrary version for that world in order to only load the newest valid world.
|
||||
An APWorld without a world_version is always treated as older than one with a version
|
||||
(**Must** use exactly the format `"major.minor.build"`, e.g. `1.0.0`)
|
||||
* `authors` - a list of authors, to eventually be displayed in various user-facing places such as WebHost and
|
||||
package managers. Should always be a list of strings.
|
||||
|
||||
If the APWorld is packaged as an `.apworld` zip file, it also needs to have `version` and `compatible_version`,
|
||||
which refer to the version of the APContainer packaging scheme defined in [Files.py](../worlds/Files.py).
|
||||
These get automatically added to the `archipelago.json` of an .apworld if it is packaged using the
|
||||
["Build apworlds" launcher component](#build-apworlds-launcher-component),
|
||||
which is the correct way to package your `.apworld` as a world developer. Do not write these fields yourself.
|
||||
|
||||
### "Build apworlds" Launcher Component
|
||||
|
||||
In the Archipelago Launcher, there is a "Build apworlds" component that will package all world folders to `.apworld`,
|
||||
and add `archipelago.json` manifest files to them.
|
||||
These .apworld files will be output to `build/apworlds` (relative to the Archipelago root directory).
|
||||
The `archipelago.json` file in each .apworld will automatically include the appropriate
|
||||
`version` and `compatible_version`.
|
||||
|
||||
If a world folder has an `archipelago.json` in its root, any fields it contains will be carried over.
|
||||
So, a world folder with an `archipelago.json` that looks like this:
|
||||
|
||||
```json
|
||||
{
|
||||
"game": "Game Name",
|
||||
"minimum_ap_version": "0.6.4",
|
||||
"world_version": "2.1.4",
|
||||
"authors": ["NewSoupVi"]
|
||||
}
|
||||
```
|
||||
|
||||
will be packaged into an `.apworld` with a manifest file inside of it that looks like this:
|
||||
|
||||
```json
|
||||
{
|
||||
"minimum_ap_version": "0.6.4",
|
||||
"world_version": "2.1.4",
|
||||
"authors": ["NewSoupVi"],
|
||||
"version": 7,
|
||||
"compatible_version": 7,
|
||||
"game": "Game Name"
|
||||
}
|
||||
```
|
||||
|
||||
This is the recommended workflow for packaging your world to an `.apworld`.
|
||||
|
||||
## Extra Data
|
||||
|
||||
@@ -29,7 +86,7 @@ The zip can contain arbitrary files in addition what was specified above.
|
||||
|
||||
## Caveats
|
||||
|
||||
Imports from other files inside the apworld have to use relative imports. e.g. `from .options import MyGameOptions`
|
||||
Imports from other files inside the APWorld have to use relative imports. e.g. `from .options import MyGameOptions`
|
||||
|
||||
Imports from AP base have to use absolute imports, e.g. `from Options import Toggle` or
|
||||
`from worlds.AutoWorld import World`
|
||||
|
||||
@@ -16,7 +16,7 @@ game contributions:
|
||||
* **Do not introduce unit test failures/regressions.**
|
||||
Archipelago supports multiple versions of Python. You may need to download older Python versions to fully test
|
||||
your changes. Currently, the oldest supported version
|
||||
is [Python 3.10](https://www.python.org/downloads/release/python-31015/).
|
||||
is [Python 3.11](https://www.python.org/downloads/release/python-31113/).
|
||||
It is recommended that automated github actions are turned on in your fork to have github run unit tests after
|
||||
pushing.
|
||||
You can turn them on here:
|
||||
|
||||
@@ -352,14 +352,14 @@ direction_matching_group_lookup = {
|
||||
|
||||
Terrain matching or dungeon shuffle:
|
||||
```python
|
||||
def randomize_within_same_group(group: int) -> List[int]:
|
||||
def randomize_within_same_group(group: int) -> list[int]:
|
||||
return [group]
|
||||
identity_group_lookup = bake_target_group_lookup(world, randomize_within_same_group)
|
||||
```
|
||||
|
||||
Directional + area shuffle:
|
||||
```python
|
||||
def get_target_groups(group: int) -> List[int]:
|
||||
def get_target_groups(group: int) -> list[int]:
|
||||
# example group: LEFT | CAVE
|
||||
# example result: [RIGHT | CAVE, DOOR | CAVE]
|
||||
direction = group & Groups.DIRECTION_MASK
|
||||
|
||||
@@ -79,7 +79,7 @@ Sent to clients when they connect to an Archipelago server.
|
||||
| generator_version | [NetworkVersion](#NetworkVersion) | Object denoting the version of Archipelago which generated the multiworld. |
|
||||
| tags | list\[str\] | Denotes special features or capabilities that the sender is capable of. Example: `WebHost` |
|
||||
| password | bool | Denotes whether a password is required to join this room. |
|
||||
| permissions | dict\[str, [Permission](#Permission)\[int\]\] | Mapping of permission name to [Permission](#Permission), keys are: "release", "collect" and "remaining". |
|
||||
| permissions | dict\[str, [Permission](#Permission)\] | Mapping of permission name to [Permission](#Permission), keys are: "release", "collect" and "remaining". |
|
||||
| hint_cost | int | The percentage of total locations that need to be checked to receive a hint from the server. |
|
||||
| location_check_points | int | The amount of hint points you receive per item/location check completed. |
|
||||
| games | list\[str\] | List of games present in this multiworld. |
|
||||
@@ -662,13 +662,14 @@ class SlotType(enum.IntFlag):
|
||||
An object representing static information about a slot.
|
||||
|
||||
```python
|
||||
import typing
|
||||
from collections.abc import Sequence
|
||||
from typing import NamedTuple
|
||||
from NetUtils import SlotType
|
||||
class NetworkSlot(typing.NamedTuple):
|
||||
class NetworkSlot(NamedTuple):
|
||||
name: str
|
||||
game: str
|
||||
type: SlotType
|
||||
group_members: typing.List[int] = [] # only populated if type == group
|
||||
group_members: Sequence[int] = [] # only populated if type == group
|
||||
```
|
||||
|
||||
### Permission
|
||||
@@ -686,8 +687,8 @@ class Permission(enum.IntEnum):
|
||||
### Hint
|
||||
An object representing a Hint.
|
||||
```python
|
||||
import typing
|
||||
class Hint(typing.NamedTuple):
|
||||
from typing import NamedTuple
|
||||
class Hint(NamedTuple):
|
||||
receiving_player: int
|
||||
finding_player: int
|
||||
location: int
|
||||
|
||||
@@ -7,10 +7,10 @@ use that version. These steps are for developers or platforms without compiled r
|
||||
## General
|
||||
|
||||
What you'll need:
|
||||
* [Python 3.10.11 or newer](https://www.python.org/downloads/), not the Windows Store version
|
||||
* [Python 3.11.9 or newer](https://www.python.org/downloads/), not the Windows Store version
|
||||
* On Windows, please consider only using the latest supported version in production environments since security
|
||||
updates for older versions are not easily available.
|
||||
* Python 3.12.x is currently the newest supported version
|
||||
* Python 3.13.x is currently the newest supported version
|
||||
* pip: included in downloads from python.org, separate in many Linux distributions
|
||||
* Matching C compiler
|
||||
* possibly optional, read operating system specific sections
|
||||
|
||||
@@ -28,7 +28,7 @@ if it does not exist.
|
||||
## Global Settings
|
||||
|
||||
All non-world-specific settings are defined directly in settings.py.
|
||||
Each value needs to have a default. If the default should be `None`, define it as `typing.Optional` and assign `None`.
|
||||
Each value needs to have a default. If the default should be `None`, annotate it using `T | None = None`.
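
As a minimal sketch (assuming the `Group` base class from settings.py; the option itself is a placeholder), an optional value could be declared like this:

```python
import settings


class ExampleOptions(settings.Group):
    class LogDirectory(str):
        """Optional directory to write log files into."""

    log_directory: LogDirectory | None = None  # default of None, annotated as described above
```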
|
||||
|
||||
To access a "global" config value, with correct typing, use one of
|
||||
```python
|
||||
|
||||
18
docs/shared_cache.md
Normal file
@@ -0,0 +1,18 @@
|
||||
# Shared Cache
|
||||
|
||||
Archipelago maintains a shared folder of information that is persisted per machine and can be reused across libraries.
|
||||
It can be found at the User Cache Directory for appname `Archipelago` in the `Cache` subfolder
|
||||
(ex. `%LOCALAPPDATA%/Archipelago/Cache`).
|
||||
|
||||
## Common Cache
|
||||
|
||||
The Common Cache `common.json` can be used to store any generic data that is expected to be shared across programs
|
||||
for the same User.
|
||||
|
||||
* `uuid`: A UUID used to identify clients as coming from the same user/machine, to be sent in the Connect packet
|
||||
|
||||
## Data Package Cache
|
||||
|
||||
The `datapackage` folder in the shared cache folder is used to store datapackages by game and checksum to be reused
|
||||
in order to save network traffic. The expected structure is `datapackage/Game Name/checksum_value.json` with the
|
||||
contents of each json file being the no-whitespace datapackage contents.
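
As an illustration, here is a minimal sketch of reading one cached entry under that layout; the cache root path is passed in because it would normally come from the user cache directory described above.

```python
# Sketch only: resolve a cached datapackage for (game, checksum), or None if absent.
import json
from pathlib import Path


def load_cached_datapackage(cache_root: Path, game: str, checksum: str) -> dict | None:
    path = cache_root / "datapackage" / game / f"{checksum}.json"
    if not path.is_file():
        return None  # not cached yet; request it from the server and store it here
    with path.open(encoding="utf-8") as f:
        return json.load(f)
```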
|
||||
@@ -15,8 +15,10 @@
|
||||
* Prefer [format string literals](https://peps.python.org/pep-0498/) over string concatenation,
|
||||
use single quotes inside them: `f"Like {dct['key']}"`
|
||||
* Use type annotations where possible for function signatures and class members.
|
||||
* Use type annotations where appropriate for local variables (e.g. `var: List[int] = []`, or when the
|
||||
type is hard or impossible to deduce.) Clear annotations help developers look up and validate API calls.
|
||||
* Use type annotations where appropriate for local variables (e.g. `var: list[int] = []`, or when the
|
||||
type is hard or impossible to deduce). Clear annotations help developers look up and validate API calls.
|
||||
* Prefer new style type annotations for new code (e.g. `var: dict[str, str | int]` over
|
||||
`var: Dict[str, Union[str, int]]`).
|
||||
* If a line ends with an open bracket/brace/parentheses, the matching closing bracket should be at the
|
||||
beginning of a line at the same indentation as the beginning of the line with the open bracket.
|
||||
```python
|
||||
@@ -60,3 +62,9 @@
|
||||
* Indent `case` inside `switch ` with 2 spaces.
|
||||
* Use single quotes.
|
||||
* Semicolons are required after every statement.
|
||||
|
||||
## KV
|
||||
|
||||
* Style should be defined in `.kv` as much as possible, only Python when unavailable.
|
||||
* Should follow [our Python style](#python-code) where appropriate (quotation marks, indentation).
|
||||
* When escaping a line break, add a space between code and backslash.
|
||||
|
||||
@@ -82,10 +82,10 @@ overridden. For more information on what methods are available to your class, ch
|
||||
|
||||
#### Alternatives to WorldTestBase
|
||||
|
||||
Unit tests can also be created using [TestBase](/test/bases.py#L16) or
|
||||
[unittest.TestCase](https://docs.python.org/3/library/unittest.html#unittest.TestCase) depending on your use case. These
|
||||
may be useful for generating a multiworld under very specific constraints without using the generic world setup, or for
|
||||
testing portions of your code that can be tested without relying on a multiworld to be created first.
|
||||
Unit tests can also be created using
|
||||
[unittest.TestCase](https://docs.python.org/3/library/unittest.html#unittest.TestCase) directly. These may be useful
|
||||
for generating a multiworld under very specific constraints without using the generic world setup, or for testing
|
||||
portions of your code that can be tested without relying on a multiworld to be created first.
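
For example, a bare-bones test of static world data might look like the following sketch; the item table here stands in for whatever data your world actually defines.

```python
# Sketch only: a plain unittest.TestCase that needs no multiworld.
import unittest

item_table = {"Sword": 1, "Shield": 2, "Bomb": 3}  # placeholder for your world's data


class TestItemTable(unittest.TestCase):
    def test_item_codes_are_unique(self) -> None:
        codes = list(item_table.values())
        self.assertEqual(len(codes), len(set(codes)))


if __name__ == "__main__":
    unittest.main()
```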
|
||||
|
||||
#### Parametrization
|
||||
|
||||
@@ -102,8 +102,7 @@ for multiple inputs) the base test. Some important things to consider when attem
|
||||
|
||||
* Classes inheriting from `WorldTestBase`, including those created by the helpers in `test.param`, will run all
|
||||
base tests by default, make sure the produced tests actually do what you aim for and do not waste a lot of
|
||||
extra CPU time. Consider using `TestBase` or `unittest.TestCase` directly
|
||||
or setting `WorldTestBase.run_default_tests` to False.
|
||||
extra CPU time. Consider using `unittest.TestCase` directly or setting `WorldTestBase.run_default_tests` to False.
|
||||
|
||||
#### Performance Considerations
|
||||
|
||||
|
||||
@@ -16,6 +16,10 @@ Current endpoints:
|
||||
- [`/status/<suuid:seed>`](#status)
|
||||
- Room API
|
||||
- [`/room_status/<suuid:room_id>`](#roomstatus)
|
||||
- Tracker API
|
||||
- [`/tracker/<suuid:tracker>`](#tracker)
|
||||
- [`/static_tracker/<suuid:tracker>`](#statictracker)
|
||||
- [`/slot_data_tracker/<suuid:tracker>`](#slotdatatracker)
|
||||
- User API
|
||||
- [`/get_rooms`](#getrooms)
|
||||
- [`/get_seeds`](#getseeds)
|
||||
@@ -244,6 +248,212 @@ Example:
|
||||
}
|
||||
```
|
||||
|
||||
## Tracker Endpoints
|
||||
Endpoints to fetch information about the players of an active WebHost room with the supplied tracker ID. The tracker ID
|
||||
can be viewed on a room's tracker page, or retrieved from the [room's endpoint](#room-endpoints).
|
||||
|
||||
### `/tracker/<suuid:tracker>`
|
||||
<a name=tracker></a>
|
||||
Will provide a dict of tracker data with the following keys:
|
||||
|
||||
- Each player's current alias (`aliases`)
|
||||
- Will return the name if there is none
|
||||
- A list of items each player has received as a NetworkItem (`player_items_received`)
|
||||
- A list of checks done by each player, as a list of location IDs (`player_checks_done`)
|
||||
- The total number of checks done by all players (`total_checks_done`)
|
||||
- Hints that players have used or received (`hints`)
|
||||
- The time of last activity of each player in RFC 1123 format (`activity_timers`)
|
||||
- The time of last active connection of each player in RFC 1123 format (`connection_timers`)
|
||||
- The current client status of each player (`player_status`)
|
||||
|
||||
Example:
|
||||
```json
|
||||
{
|
||||
"aliases": [
|
||||
{
|
||||
"team": 0,
|
||||
"player": 1,
|
||||
"alias": "Incompetence"
|
||||
},
|
||||
{
|
||||
"team": 0,
|
||||
"player": 2,
|
||||
"alias": "Slot_Name_2"
|
||||
}
|
||||
],
|
||||
"player_items_received": [
|
||||
{
|
||||
"team": 0,
|
||||
"player": 1,
|
||||
"items": [
|
||||
[1, 1, 1, 0],
|
||||
[2, 2, 2, 1]
|
||||
]
|
||||
},
|
||||
{
|
||||
"team": 0,
|
||||
"player": 2,
|
||||
"items": [
|
||||
[1, 1, 1, 2],
|
||||
[2, 2, 2, 0]
|
||||
]
|
||||
}
|
||||
],
|
||||
"player_checks_done": [
|
||||
{
|
||||
"team": 0,
|
||||
"player": 1,
|
||||
"locations": [
|
||||
1,
|
||||
2
|
||||
]
|
||||
},
|
||||
{
|
||||
"team": 0,
|
||||
"player": 2,
|
||||
"locations": [
|
||||
1,
|
||||
2
|
||||
]
|
||||
}
|
||||
],
|
||||
"total_checks_done": [
|
||||
{
|
||||
"team": 0,
|
||||
"checks_done": 4
|
||||
}
|
||||
],
|
||||
"hints": [
|
||||
{
|
||||
"team": 0,
|
||||
"player": 1,
|
||||
"hints": [
|
||||
[1, 2, 4, 6, 0, "", 4, 0]
|
||||
]
|
||||
},
|
||||
{
|
||||
"team": 0,
|
||||
"player": 2,
|
||||
"hints": []
|
||||
}
|
||||
],
|
||||
"activity_timers": [
|
||||
{
|
||||
"team": 0,
|
||||
"player": 1,
|
||||
"time": "Fri, 18 Apr 2025 20:35:45 GMT"
|
||||
},
|
||||
{
|
||||
"team": 0,
|
||||
"player": 2,
|
||||
"time": "Fri, 18 Apr 2025 20:42:46 GMT"
|
||||
}
|
||||
],
|
||||
"connection_timers": [
|
||||
{
|
||||
"team": 0,
|
||||
"player": 1,
|
||||
"time": "Fri, 18 Apr 2025 20:38:25 GMT"
|
||||
},
|
||||
{
|
||||
"team": 0,
|
||||
"player": 2,
|
||||
"time": "Fri, 18 Apr 2025 21:03:00 GMT"
|
||||
}
|
||||
],
|
||||
"player_status": [
|
||||
{
|
||||
"team": 0,
|
||||
"player": 1,
|
||||
"status": 0
|
||||
},
|
||||
{
|
||||
"team": 0,
|
||||
"player": 2,
|
||||
"status": 0
|
||||
}
|
||||
]
|
||||
}
|
||||
```
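
As a usage illustration, the following sketch polls this endpoint with `requests`; the host and `/api` prefix are assumptions about the deployment, and the tracker ID is a placeholder.

```python
# Sketch only: fetch tracker data and total up checks across all teams.
import requests

BASE_URL = "https://archipelago.gg/api"  # assumed host and API prefix
tracker_id = "your-tracker-id-here"      # placeholder

response = requests.get(f"{BASE_URL}/tracker/{tracker_id}", timeout=30)
response.raise_for_status()
data = response.json()

total = sum(entry["checks_done"] for entry in data["total_checks_done"])
print(f"{total} checks done across {len(data['aliases'])} players")
```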
|
||||
|
||||
### `/static_tracker/<suuid:tracker>`
|
||||
<a name=statictracker></a>
|
||||
Will provide a dict of static tracker data with the following keys:
|
||||
|
||||
- item_link groups and their players (`groups`)
|
||||
- The datapackage hash for each game (`datapackage`)
|
||||
- This hash can then be sent to the datapackage API to receive the appropriate datapackage as necessary
|
||||
- The number of checks found vs. total checks available per player (`player_locations_total`)
|
||||
  - Same logic as the multitracker template: the found count is `len(player_checks_done.locations)` and the total is `player_locations_total.total_locations` (all available checks).
|
||||
|
||||
Example:
|
||||
```json
|
||||
{
|
||||
"groups": [
|
||||
{
|
||||
"slot": 5,
|
||||
"name": "testGroup",
|
||||
"members": [
|
||||
1,
|
||||
2
|
||||
]
|
||||
},
|
||||
{
|
||||
"slot": 6,
|
||||
"name": "myCoolLink",
|
||||
"members": [
|
||||
3,
|
||||
4
|
||||
]
|
||||
}
|
||||
],
|
||||
"datapackage": {
|
||||
"Archipelago": {
|
||||
"checksum": "ac9141e9ad0318df2fa27da5f20c50a842afeecb",
|
||||
},
|
||||
"The Messenger": {
|
||||
"checksum": "6991cbcda7316b65bcb072667f3ee4c4cae71c0b",
|
||||
}
|
||||
},
|
||||
"player_locations_total": [
|
||||
{
|
||||
"player": 1,
|
||||
"team" : 0,
|
||||
"total_locations": 10
|
||||
},
|
||||
{
|
||||
"player": 2,
|
||||
"team" : 0,
|
||||
"total_locations": 20
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### `/slot_data_tracker/<suuid:tracker>`
|
||||
<a name=slotdatatracker></a>
|
||||
Will provide a list of each player's slot_data.
|
||||
|
||||
Example:
|
||||
```json
|
||||
[
|
||||
{
|
||||
"player": 1,
|
||||
"slot_data": {
|
||||
"example_option": 1,
|
||||
"other_option": 3
|
||||
}
|
||||
},
|
||||
{
|
||||
"player": 2,
|
||||
"slot_data": {
|
||||
"example_option": 1,
|
||||
"other_option": 2
|
||||
}
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
## User Endpoints
|
||||
User endpoints can get room and seed details from the current session tokens (cookies).
|
||||
|
||||
|
||||
@@ -76,8 +76,8 @@ webhost:
|
||||
* `game_info_languages` (optional) list of strings for defining the existing game info pages your game supports. The
|
||||
documents must be prefixed with the same string as defined here. Default already has 'en'.
|
||||
|
||||
* `options_presets` (optional) `Dict[str, Dict[str, Any]]` where the keys are the names of the presets and the values
|
||||
are the options to be set for that preset. The options are defined as a `Dict[str, Any]` where the keys are the names
|
||||
* `options_presets` (optional) `dict[str, dict[str, Any]]` where the keys are the names of the presets and the values
|
||||
are the options to be set for that preset. The options are defined as a `dict[str, Any]` where the keys are the names
|
||||
of the options and the values are the values to be set for that option. These presets will be available for users to
|
||||
select from on the game's options page.
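
A short sketch of such a presets mapping follows; the world, preset and option names are placeholders.

```python
# Sketch only: presets offered to users on the game's options page.
from typing import Any

from worlds.AutoWorld import WebWorld


class MyGameWeb(WebWorld):
    options_presets: dict[str, dict[str, Any]] = {
        "Easy": {"difficulty": "easy", "final_boss_hp": 1000},
        "Brutal": {"difficulty": "hard", "final_boss_hp": 10000},
    }
```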
|
||||
|
||||
@@ -257,6 +257,14 @@ another flag like "progression", it means "an especially useful progression item
|
||||
combined with `progression`; see below)
|
||||
* `progression_skip_balancing`: the combination of `progression` and `skip_balancing`, i.e., a progression item that
|
||||
will not be moved around by progression balancing; used, e.g., for currency or tokens, to not flood early spheres
|
||||
* `deprioritized`: denotes that an item should not be placed on priority locations
|
||||
(to be combined with `progression`; see below)
|
||||
* `progression_deprioritized`: the combination of `progression` and `deprioritized`, i.e. a progression item that
|
||||
should not be placed on priority locations, despite being progression;
|
||||
like skip_balancing, this is commonly used for currency or tokens.
|
||||
* `progression_deprioritized_skip_balancing`: the combination of `progression`, `deprioritized` and `skip_balancing`.
|
||||
Since there is overlap between the kind of items that want `skip_balancing` and `deprioritized`,
|
||||
this combined classification exists for convenience.
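
A hedged sketch of tagging an item with one of these combined classifications; only `ItemClassification` and the `Item` base are assumed from BaseClasses, everything else is a placeholder.

```python
# Sketch only: a currency item that is progression, but neither
# progression-balanced nor placed on priority locations.
from BaseClasses import Item, ItemClassification


class MyGameItem(Item):
    game = "My Game"


bag_of_rupees = MyGameItem("Bag of Rupees",
                           ItemClassification.progression_deprioritized_skip_balancing,
                           code=123456, player=1)
```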
|
||||
|
||||
### Regions
|
||||
|
||||
@@ -745,7 +753,7 @@ from BaseClasses import CollectionState, MultiWorld
|
||||
from worlds.AutoWorld import LogicMixin
|
||||
|
||||
class MyGameState(LogicMixin):
|
||||
mygame_defeatable_enemies: Dict[int, Set[str]] # per player
|
||||
mygame_defeatable_enemies: dict[int, set[str]] # per player
|
||||
|
||||
def init_mixin(self, multiworld: MultiWorld) -> None:
|
||||
# Initialize per player with the corresponding "nothing" value, such as 0 or an empty set.
|
||||
@@ -874,11 +882,11 @@ item/location pairs is unnecessary since the AP server already retains and freel
|
||||
that request it. The most common usage of slot data is sending option results that the client needs to be aware of.
|
||||
|
||||
```python
|
||||
def fill_slot_data(self) -> Dict[str, Any]:
|
||||
def fill_slot_data(self) -> dict[str, Any]:
|
||||
# In order for our game client to handle the generated seed correctly we need to know what the user selected
|
||||
# for their difficulty and final boss HP.
|
||||
# A dictionary returned from this method gets set as the slot_data and will be sent to the client after connecting.
|
||||
# The options dataclass has a method to return a `Dict[str, Any]` of each option name provided and the relevant
|
||||
# The options dataclass has a method to return a `dict[str, Any]` of each option name provided and the relevant
|
||||
# option's value.
|
||||
return self.options.as_dict("difficulty", "final_boss_hp")
|
||||
```
|
||||
|
||||
@@ -74,13 +74,12 @@ class EntranceLookup:
|
||||
if entrance in self._expands_graph_cache:
|
||||
return self._expands_graph_cache[entrance]
|
||||
|
||||
visited = set()
|
||||
seen = {entrance.connected_region}
|
||||
q: deque[Region] = deque()
|
||||
q.append(entrance.connected_region)
|
||||
|
||||
while q:
|
||||
region = q.popleft()
|
||||
visited.add(region)
|
||||
|
||||
# check if the region itself is progression
|
||||
if region in region.multiworld.indirect_connections:
|
||||
@@ -103,7 +102,8 @@ class EntranceLookup:
|
||||
and exit_ in self._usable_exits):
|
||||
self._expands_graph_cache[entrance] = True
|
||||
return True
|
||||
elif exit_.connected_region and exit_.connected_region not in visited:
|
||||
elif exit_.connected_region and exit_.connected_region not in seen:
|
||||
seen.add(exit_.connected_region)
|
||||
q.append(exit_.connected_region)
|
||||
|
||||
self._expands_graph_cache[entrance] = False
|
||||
|
||||
@@ -180,8 +180,8 @@ Root: HKCR; Subkey: "{#MyAppName}mm2patch\shell\open\command"; ValueData: """{a
|
||||
|
||||
Root: HKCR; Subkey: ".apladx"; ValueData: "{#MyAppName}ladxpatch"; Flags: uninsdeletevalue; ValueType: string; ValueName: "";
|
||||
Root: HKCR; Subkey: "{#MyAppName}ladxpatch"; ValueData: "Archipelago Links Awakening DX Patch"; Flags: uninsdeletekey; ValueType: string; ValueName: "";
|
||||
Root: HKCR; Subkey: "{#MyAppName}ladxpatch\DefaultIcon"; ValueData: "{app}\ArchipelagoLinksAwakeningClient.exe,0"; ValueType: string; ValueName: "";
|
||||
Root: HKCR; Subkey: "{#MyAppName}ladxpatch\shell\open\command"; ValueData: """{app}\ArchipelagoLinksAwakeningClient.exe"" ""%1"""; ValueType: string; ValueName: "";
|
||||
Root: HKCR; Subkey: "{#MyAppName}ladxpatch\DefaultIcon"; ValueData: "{app}\ArchipelagoLauncher.exe,0"; ValueType: string; ValueName: "";
|
||||
Root: HKCR; Subkey: "{#MyAppName}ladxpatch\shell\open\command"; ValueData: """{app}\ArchipelagoLauncher.exe"" ""%1"""; ValueType: string; ValueName: "";
|
||||
|
||||
Root: HKCR; Subkey: ".aptloz"; ValueData: "{#MyAppName}tlozpatch"; Flags: uninsdeletevalue; ValueType: string; ValueName: "";
|
||||
Root: HKCR; Subkey: "{#MyAppName}tlozpatch"; ValueData: "Archipelago The Legend of Zelda Patch"; Flags: uninsdeletekey; ValueType: string; ValueName: "";
|
||||
|
||||
25
kvui.py
@@ -34,6 +34,17 @@ from kivy.config import Config
|
||||
Config.set("input", "mouse", "mouse,disable_multitouch")
|
||||
Config.set("kivy", "exit_on_escape", "0")
|
||||
Config.set("graphics", "multisamples", "0") # multisamples crash old intel drivers
|
||||
|
||||
# Workaround for an issue where importing kivy.core.window before loading sounds
|
||||
# will hang the whole application on Linux once the first sound is loaded.
|
||||
# kivymd imports kivy.core.window, so we have to do this before the first kivymd import.
|
||||
# No longer necessary when we switch to kivy 3.0.0, which fixes this issue.
|
||||
from kivy.core.audio import SoundLoader
|
||||
for classobj in SoundLoader._classes:
|
||||
# The least invasive way to force a SoundLoader class to load its audio engine seems to be calling
|
||||
# .extensions(), which e.g. in audio_sdl2.pyx then calls a function called "mix_init()"
|
||||
classobj.extensions()
|
||||
|
||||
from kivymd.uix.divider import MDDivider
|
||||
from kivy.core.window import Window
|
||||
from kivy.core.clipboard import Clipboard
|
||||
@@ -720,13 +731,11 @@ class MessageBoxLabel(MDLabel):
|
||||
|
||||
|
||||
class MessageBox(Popup):
|
||||
|
||||
def __init__(self, title, text, error=False, **kwargs):
|
||||
label = MessageBoxLabel(text=text)
|
||||
label = MessageBoxLabel(text=text, padding=("6dp", "0dp"))
|
||||
separator_color = [217 / 255, 129 / 255, 122 / 255, 1.] if error else [47 / 255., 167 / 255., 212 / 255, 1.]
|
||||
super().__init__(title=title, content=label, size_hint=(0.5, None), width=max(100, int(label.width) + 40),
|
||||
separator_color=separator_color, **kwargs)
|
||||
self.height += max(0, label.height - 18)
|
||||
|
||||
|
||||
class MDNavigationItemBase(MDNavigationItem):
|
||||
@@ -840,15 +849,15 @@ class GameManager(ThemedApp):
|
||||
self.log_panels: typing.Dict[str, Widget] = {}
|
||||
|
||||
# keep track of last used command to autofill on click
|
||||
self.last_autofillable_command = "hint"
|
||||
autofillable_commands = ("hint_location", "hint", "getitem")
|
||||
self.last_autofillable_command = "!hint"
|
||||
autofillable_commands = ("!hint_location", "!hint", "!getitem")
|
||||
original_say = ctx.on_user_say
|
||||
|
||||
def intercept_say(text):
|
||||
text = original_say(text)
|
||||
if text:
|
||||
for command in autofillable_commands:
|
||||
if text.startswith("!" + command):
|
||||
if text.startswith(command):
|
||||
self.last_autofillable_command = command
|
||||
break
|
||||
return text
|
||||
@@ -1101,10 +1110,6 @@ class GameManager(ThemedApp):
|
||||
hints = self.ctx.stored_data.get(f"_read_hints_{self.ctx.team}_{self.ctx.slot}", [])
|
||||
self.hint_log.refresh_hints(hints)
|
||||
|
||||
# default F1 keybind, opens a settings menu, that seems to break the layout engine once closed
|
||||
def open_settings(self, *largs):
|
||||
pass
|
||||
|
||||
|
||||
class LogtoUI(logging.Handler):
|
||||
def __init__(self, on_log):
|
||||
|
||||
16
ruff.toml
Normal file
@@ -0,0 +1,16 @@
|
||||
line-length = 120
|
||||
indent-width = 4
|
||||
target-version = "py311"
|
||||
|
||||
[lint]
|
||||
select = ["B", "C", "E", "F", "W", "I", "N", "Q", "UP", "RET", "RSE", "RUF", "ISC", "PLC", "PLE", "PLW", "T20", "PERF"]
|
||||
ignore = [
|
||||
"B011", # In AP, the use of assert False is essential because we optimise out these statements for release builds.
|
||||
"C901", # Author disagrees with limiting branch complexity
|
||||
"N818", # Author agrees with this rule, but Core AP violates this and changing it would be a hassle.
|
||||
"PLC0415", # In AP, we consider local imports totally fine & necessary
|
||||
"PLC1802", # Author agrees with this rule, but it literally changes the functionality of the code, which is unsafe.
|
||||
"PLC1901", # This is just not equivalent
|
||||
"PLE1141", # Gives false positives when the dict keys are tuples, but does not mention this in the suggested fix.
|
||||
"UP015", # Explicit is better than implicit, so we'd prefer to keep "r" in open() calls.
|
||||
]
|
||||
12
settings.py
@@ -579,6 +579,17 @@ class ServerOptions(Group):
|
||||
"goal" -> Client can ask for remaining items after goal completion
|
||||
"""
|
||||
|
||||
class CountdownMode(str):
|
||||
"""
|
||||
Countdown modes
|
||||
Determines whether or not a player can initiate a countdown with !countdown
|
||||
Note that /countdown is always available to the host.
|
||||
|
||||
"enabled" -> Client can always initiate a countdown with !countdown.
|
||||
"disabled" -> Client can never initiate a countdown with !countdown.
|
||||
"auto" -> !countdown will be available for any room with less than 30 slots.
|
||||
"""
|
||||
|
||||
class AutoShutdown(int):
|
||||
"""Automatically shut down the server after this many seconds without new location checks, 0 to keep running"""
|
||||
|
||||
@@ -613,6 +624,7 @@ class ServerOptions(Group):
|
||||
release_mode: ReleaseMode = ReleaseMode("auto")
|
||||
collect_mode: CollectMode = CollectMode("auto")
|
||||
remaining_mode: RemainingMode = RemainingMode("goal")
|
||||
countdown_mode: CountdownMode = CountdownMode("auto")
|
||||
auto_shutdown: AutoShutdown = AutoShutdown(0)
|
||||
compatibility: Compatibility = Compatibility(2)
|
||||
log_network: LogNetwork = LogNetwork(0)
|
||||
|
||||
44
setup.py
@@ -22,7 +22,7 @@ SNI_VERSION = "v0.0.100" # change back to "latest" once tray icon issues are fi
|
||||
|
||||
|
||||
# This is a bit jank. We need cx-Freeze to be able to run anything from this script, so install it
|
||||
requirement = 'cx-Freeze==8.0.0'
|
||||
requirement = 'cx-Freeze==8.4.0'
|
||||
try:
|
||||
import pkg_resources
|
||||
try:
|
||||
@@ -30,7 +30,7 @@ try:
|
||||
install_cx_freeze = False
|
||||
except pkg_resources.ResolutionError:
|
||||
install_cx_freeze = True
|
||||
except ImportError:
|
||||
except (AttributeError, ImportError):
|
||||
install_cx_freeze = True
|
||||
pkg_resources = None # type: ignore[assignment]
|
||||
|
||||
@@ -65,7 +65,6 @@ from Cython.Build import cythonize
|
||||
non_apworlds: set[str] = {
|
||||
"A Link to the Past",
|
||||
"Adventure",
|
||||
"ArchipIDLE",
|
||||
"Archipelago",
|
||||
"Lufia II Ancient Cave",
|
||||
"Meritous",
|
||||
@@ -147,7 +146,16 @@ def download_SNI() -> None:
|
||||
|
||||
signtool: str | None = None
|
||||
try:
|
||||
with urllib.request.urlopen('http://192.168.206.4:12345/connector/status') as response:
|
||||
import socket
|
||||
|
||||
sign_host, sign_port = "192.168.206.4", 12345
|
||||
# check if the sign_host is on a local network
|
||||
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
|
||||
s.connect((sign_host, sign_port))
|
||||
if s.getsockname()[0].rsplit(".", 1)[0] != sign_host.rsplit(".", 1)[0]:
|
||||
raise ConnectionError() # would go through default route
|
||||
# configure signtool
|
||||
with urllib.request.urlopen(f"http://{sign_host}:{sign_port}/connector/status") as response:
|
||||
html = response.read()
|
||||
if b"status=OK\n" in html:
|
||||
signtool = (r'signtool sign /sha1 6df76fe776b82869a5693ddcb1b04589cffa6faf /fd sha256 /td sha256 '
|
||||
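The replacement above avoids a long network timeout when building outside the signing LAN: connecting a UDP socket never sends a packet, but it does select the local interface that would be used, so comparing the first three octets is a cheap "same /24 subnet" test. A minimal standalone version of that check (same logic as above, hypothetical function name):

import socket

def on_same_24_subnet(host: str, port: int) -> bool:
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        s.connect((host, port))  # no packet is sent for a SOCK_DGRAM connect
        local_ip = s.getsockname()[0]
    finally:
        s.close()
    return local_ip.rsplit(".", 1)[0] == host.rsplit(".", 1)[0]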
@@ -372,6 +380,7 @@ class BuildExeCommand(cx_Freeze.command.build_exe.build_exe):
|
||||
os.makedirs(self.buildfolder / "Players" / "Templates", exist_ok=True)
|
||||
from Options import generate_yaml_templates
|
||||
from worlds.AutoWorld import AutoWorldRegister
|
||||
from worlds.Files import APWorldContainer
|
||||
assert not non_apworlds - set(AutoWorldRegister.world_types), \
|
||||
f"Unknown world {non_apworlds - set(AutoWorldRegister.world_types)} designated for .apworld"
|
||||
folders_to_remove: list[str] = []
|
||||
@@ -380,13 +389,36 @@ class BuildExeCommand(cx_Freeze.command.build_exe.build_exe):
|
||||
if worldname not in non_apworlds:
|
||||
file_name = os.path.split(os.path.dirname(worldtype.__file__))[1]
|
||||
world_directory = self.libfolder / "worlds" / file_name
|
||||
if os.path.isfile(world_directory / "archipelago.json"):
|
||||
with open(os.path.join(world_directory, "archipelago.json"), mode="r", encoding="utf-8") as manifest_file:
|
||||
manifest = json.load(manifest_file)
|
||||
|
||||
assert "game" in manifest, (
|
||||
f"World directory {world_directory} has an archipelago.json manifest file, but it"
|
||||
"does not define a \"game\"."
|
||||
)
|
||||
assert manifest["game"] == worldtype.game, (
|
||||
f"World directory {world_directory} has an archipelago.json manifest file, but value of the"
|
||||
f"\"game\" field ({manifest['game']} does not equal the World class's game ({worldtype.game})."
|
||||
)
|
||||
else:
|
||||
manifest = {}
|
||||
# this method creates an apworld that cannot be moved to a different OS or minor python version,
|
||||
# which should be ok
|
||||
with zipfile.ZipFile(self.libfolder / "worlds" / (file_name + ".apworld"), "x", zipfile.ZIP_DEFLATED,
|
||||
zip_path = self.libfolder / "worlds" / (file_name + ".apworld")
|
||||
apworld = APWorldContainer(str(zip_path))
|
||||
apworld.minimum_ap_version = version_tuple
|
||||
apworld.maximum_ap_version = version_tuple
|
||||
apworld.game = worldtype.game
|
||||
manifest.update(apworld.get_manifest())
|
||||
apworld.manifest_path = f"{file_name}/archipelago.json"
|
||||
with zipfile.ZipFile(zip_path, "x", zipfile.ZIP_DEFLATED,
|
||||
compresslevel=9) as zf:
|
||||
for path in world_directory.rglob("*.*"):
|
||||
relative_path = os.path.join(*path.parts[path.parts.index("worlds")+1:])
|
||||
zf.write(path, relative_path)
|
||||
if not relative_path.endswith("archipelago.json"):
|
||||
zf.write(path, relative_path)
|
||||
zf.writestr(apworld.manifest_path, json.dumps(manifest))
|
||||
folders_to_remove.append(file_name)
|
||||
shutil.rmtree(world_directory)
|
||||
shutil.copyfile("meta.yaml", self.buildfolder / "Players" / "Templates" / "meta.yaml")
|
||||
|
||||
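The packaging change above moves manifest handling to APWorldContainer and writes the merged manifest as "<world>/archipelago.json" inside each generated .apworld. A small sketch of how such an archive could be inspected afterwards (hypothetical helper, assuming only the layout produced above):

import json
import zipfile

def read_apworld_manifest(apworld_path: str, world_dir_name: str) -> dict:
    # The manifest is stored next to the world's files inside the zip.
    with zipfile.ZipFile(apworld_path) as zf:
        with zf.open(f"{world_dir_name}/archipelago.json") as manifest_file:
            return json.load(manifest_file)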
@@ -1,3 +0,0 @@
|
||||
from .bases import TestBase, WorldTestBase
|
||||
from warnings import warn
|
||||
warn("TestBase was renamed to bases", DeprecationWarning)
|
||||
@@ -9,98 +9,7 @@ from test.general import gen_steps
|
||||
from worlds import AutoWorld
|
||||
from worlds.AutoWorld import World, call_all
|
||||
|
||||
from BaseClasses import Location, MultiWorld, CollectionState, ItemClassification, Item
|
||||
from worlds.alttp.Items import item_factory
|
||||
|
||||
|
||||
class TestBase(unittest.TestCase):
|
||||
multiworld: MultiWorld
|
||||
_state_cache = {}
|
||||
|
||||
def get_state(self, items):
|
||||
if (self.multiworld, tuple(items)) in self._state_cache:
|
||||
return self._state_cache[self.multiworld, tuple(items)]
|
||||
state = CollectionState(self.multiworld)
|
||||
for item in items:
|
||||
item.classification = ItemClassification.progression
|
||||
state.collect(item, prevent_sweep=True)
|
||||
state.sweep_for_advancements()
|
||||
state.update_reachable_regions(1)
|
||||
self._state_cache[self.multiworld, tuple(items)] = state
|
||||
return state
|
||||
|
||||
def get_path(self, state, region):
|
||||
def flist_to_iter(node):
|
||||
while node:
|
||||
value, node = node
|
||||
yield value
|
||||
|
||||
from itertools import zip_longest
|
||||
reversed_path_as_flist = state.path.get(region, (region, None))
|
||||
string_path_flat = reversed(list(map(str, flist_to_iter(reversed_path_as_flist))))
|
||||
# Now we combine the flat string list into (region, exit) pairs
|
||||
pathsiter = iter(string_path_flat)
|
||||
pathpairs = zip_longest(pathsiter, pathsiter)
|
||||
return list(pathpairs)
|
||||
|
||||
def run_location_tests(self, access_pool):
|
||||
for i, (location, access, *item_pool) in enumerate(access_pool):
|
||||
items = item_pool[0]
|
||||
all_except = item_pool[1] if len(item_pool) > 1 else None
|
||||
state = self._get_items(item_pool, all_except)
|
||||
path = self.get_path(state, self.multiworld.get_location(location, 1).parent_region)
|
||||
with self.subTest(msg="Reach Location", location=location, access=access, items=items,
|
||||
all_except=all_except, path=path, entry=i):
|
||||
|
||||
self.assertEqual(self.multiworld.get_location(location, 1).can_reach(state), access,
|
||||
f"failed {self.multiworld.get_location(location, 1)} with: {item_pool}")
|
||||
|
||||
# check for partial solution
|
||||
if not all_except and access: # we are not supposed to be able to reach location with partial inventory
|
||||
for missing_item in item_pool[0]:
|
||||
with self.subTest(msg="Location reachable without required item", location=location,
|
||||
items=item_pool[0], missing_item=missing_item, entry=i):
|
||||
state = self._get_items_partial(item_pool, missing_item)
|
||||
|
||||
self.assertEqual(self.multiworld.get_location(location, 1).can_reach(state), False,
|
||||
f"failed {self.multiworld.get_location(location, 1)}: succeeded with "
|
||||
f"{missing_item} removed from: {item_pool}")
|
||||
|
||||
def run_entrance_tests(self, access_pool):
|
||||
for i, (entrance, access, *item_pool) in enumerate(access_pool):
|
||||
items = item_pool[0]
|
||||
all_except = item_pool[1] if len(item_pool) > 1 else None
|
||||
state = self._get_items(item_pool, all_except)
|
||||
path = self.get_path(state, self.multiworld.get_entrance(entrance, 1).parent_region)
|
||||
with self.subTest(msg="Reach Entrance", entrance=entrance, access=access, items=items,
|
||||
all_except=all_except, path=path, entry=i):
|
||||
|
||||
self.assertEqual(self.multiworld.get_entrance(entrance, 1).can_reach(state), access)
|
||||
|
||||
# check for partial solution
|
||||
if not all_except and access: # we are not supposed to be able to reach location with partial inventory
|
||||
for missing_item in item_pool[0]:
|
||||
with self.subTest(msg="Entrance reachable without required item", entrance=entrance,
|
||||
items=item_pool[0], missing_item=missing_item, entry=i):
|
||||
state = self._get_items_partial(item_pool, missing_item)
|
||||
self.assertEqual(self.multiworld.get_entrance(entrance, 1).can_reach(state), False,
|
||||
f"failed {self.multiworld.get_entrance(entrance, 1)} with: {item_pool}")
|
||||
|
||||
def _get_items(self, item_pool, all_except):
|
||||
if all_except and len(all_except) > 0:
|
||||
items = self.multiworld.itempool[:]
|
||||
items = [item for item in items if
|
||||
item.name not in all_except and not ("Bottle" in item.name and "AnyBottle" in all_except)]
|
||||
items.extend(item_factory(item_pool[0], self.multiworld.worlds[1]))
|
||||
else:
|
||||
items = item_factory(item_pool[0], self.multiworld.worlds[1])
|
||||
return self.get_state(items)
|
||||
|
||||
def _get_items_partial(self, item_pool, missing_item):
|
||||
new_items = item_pool[0].copy()
|
||||
new_items.remove(missing_item)
|
||||
items = item_factory(new_items, self.multiworld.worlds[1])
|
||||
return self.get_state(items)
|
||||
from BaseClasses import Location, MultiWorld, CollectionState, Item
|
||||
|
||||
|
||||
class WorldTestBase(unittest.TestCase):
|
||||
|
||||
0
test/benchmark/compression/__init__.py
Normal file
227
test/benchmark/compression/benchmark.py
Normal file
@@ -0,0 +1,227 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
# based on python-websockets compression benchmark (c) Aymeric Augustin and contributors
|
||||
# https://github.com/python-websockets/websockets/blob/main/experiments/compression/benchmark.py
|
||||
|
||||
import collections
|
||||
import time
|
||||
import zlib
|
||||
from typing import Iterable
|
||||
|
||||
|
||||
REPEAT = 10
|
||||
|
||||
WB, ML = 12, 5 # defaults used as a reference
|
||||
WBITS = range(9, 16)
|
||||
MEMLEVELS = range(1, 10)
|
||||
|
||||
|
||||
def benchmark(data: Iterable[bytes]) -> None:
|
||||
size: dict[int, dict[int, float]] = collections.defaultdict(dict)
|
||||
duration: dict[int, dict[int, float]] = collections.defaultdict(dict)
|
||||
|
||||
for wbits in WBITS:
|
||||
for memLevel in MEMLEVELS:
|
||||
encoder = zlib.compressobj(wbits=-wbits, memLevel=memLevel)
|
||||
encoded = []
|
||||
|
||||
print(f"Compressing {REPEAT} times with {wbits=} and {memLevel=}")
|
||||
|
||||
t0 = time.perf_counter()
|
||||
|
||||
for _ in range(REPEAT):
|
||||
for item in data:
|
||||
# Taken from PerMessageDeflate.encode
|
||||
item = encoder.compress(item) + encoder.flush(zlib.Z_SYNC_FLUSH)
|
||||
if item.endswith(b"\x00\x00\xff\xff"):
|
||||
item = item[:-4]
|
||||
encoded.append(item)
|
||||
|
||||
t1 = time.perf_counter()
|
||||
|
||||
size[wbits][memLevel] = sum(len(item) for item in encoded) / REPEAT
|
||||
duration[wbits][memLevel] = (t1 - t0) / REPEAT
|
||||
|
||||
raw_size = sum(len(item) for item in data)
|
||||
|
||||
print("=" * 79)
|
||||
print("Compression ratio")
|
||||
print("=" * 79)
|
||||
print("\t".join(["wb \\ ml"] + [str(memLevel) for memLevel in MEMLEVELS]))
|
||||
for wbits in WBITS:
|
||||
print(
|
||||
"\t".join(
|
||||
[str(wbits)]
|
||||
+ [
|
||||
f"{100 * (1 - size[wbits][memLevel] / raw_size):.1f}%"
|
||||
for memLevel in MEMLEVELS
|
||||
]
|
||||
)
|
||||
)
|
||||
print("=" * 79)
|
||||
print()
|
||||
|
||||
print("=" * 79)
|
||||
print("CPU time")
|
||||
print("=" * 79)
|
||||
print("\t".join(["wb \\ ml"] + [str(memLevel) for memLevel in MEMLEVELS]))
|
||||
for wbits in WBITS:
|
||||
print(
|
||||
"\t".join(
|
||||
[str(wbits)]
|
||||
+ [
|
||||
f"{1000 * duration[wbits][memLevel]:.1f}ms"
|
||||
for memLevel in MEMLEVELS
|
||||
]
|
||||
)
|
||||
)
|
||||
print("=" * 79)
|
||||
print()
|
||||
|
||||
print("=" * 79)
|
||||
print(f"Size vs. {WB} \\ {ML}")
|
||||
print("=" * 79)
|
||||
print("\t".join(["wb \\ ml"] + [str(memLevel) for memLevel in MEMLEVELS]))
|
||||
for wbits in WBITS:
|
||||
print(
|
||||
"\t".join(
|
||||
[str(wbits)]
|
||||
+ [
|
||||
f"{100 * (size[wbits][memLevel] / size[WB][ML] - 1):.1f}%"
|
||||
for memLevel in MEMLEVELS
|
||||
]
|
||||
)
|
||||
)
|
||||
print("=" * 79)
|
||||
print()
|
||||
|
||||
print("=" * 79)
|
||||
print(f"Time vs. {WB} \\ {ML}")
|
||||
print("=" * 79)
|
||||
print("\t".join(["wb \\ ml"] + [str(memLevel) for memLevel in MEMLEVELS]))
|
||||
for wbits in WBITS:
|
||||
print(
|
||||
"\t".join(
|
||||
[str(wbits)]
|
||||
+ [
|
||||
f"{100 * (duration[wbits][memLevel] / duration[WB][ML] - 1):.1f}%"
|
||||
for memLevel in MEMLEVELS
|
||||
]
|
||||
)
|
||||
)
|
||||
print("=" * 79)
|
||||
print()
|
||||
|
||||
|
||||
def generate_data_package_corpus() -> list[bytes]:
|
||||
# compared to default 12, 5:
|
||||
# 11, 4 saves 16K RAM, gives +4.6% size, -5.0% time .. +1.1% time
|
||||
# 10, 4 saves 20K RAM, gives +10.2% size, -3.8% time .. +0.6% time
|
||||
# 11, 3 saves 20K RAM, gives +6.5% size, +14.2% time
|
||||
# 10, 3 saves 24K RAM, gives +12.8% size, +0.5% time .. +6.9% time
|
||||
# NOTE: time delta is highly unstable; time is ~100ms
|
||||
import warnings
|
||||
|
||||
with warnings.catch_warnings():
|
||||
warnings.simplefilter("ignore")
|
||||
|
||||
from NetUtils import encode
|
||||
from worlds import network_data_package
|
||||
|
||||
return [encode(network_data_package).encode("utf-8")]
|
||||
|
||||
|
||||
def generate_solo_release_corpus() -> list[bytes]:
|
||||
# compared to default 12, 5:
|
||||
# 11, 4 saves 16K RAM, gives +0.9% size, +3.9% time
|
||||
# 10, 4 saves 20K RAM, gives +1.4% size, +3.4% time
|
||||
# 11, 3 saves 20K RAM, gives +1.8% size, +13.9% time
|
||||
# 10, 3 saves 24K RAM, gives +2.1% size, +4.8% time
|
||||
# NOTE: time delta is highly unstable; time is ~0.4ms
|
||||
|
||||
from random import Random
|
||||
from MultiServer import json_format_send_event
|
||||
from NetUtils import encode, NetworkItem
|
||||
|
||||
r = Random()
|
||||
r.seed(0)
|
||||
solo_release = []
|
||||
solo_release_locations = [r.randint(1000, 1999) for _ in range(200)]
|
||||
solo_release_items = sorted([r.randint(1000, 1999) for _ in range(200)]) # currently sorted by item
|
||||
solo_player = 1
|
||||
for location, item in zip(solo_release_locations, solo_release_items):
|
||||
flags = r.choice((0, 0, 0, 0, 0, 0, 0, 1, 2, 3))
|
||||
network_item = NetworkItem(item, location, solo_player, flags)
|
||||
solo_release.append(json_format_send_event(network_item, solo_player))
|
||||
solo_release.append({
|
||||
"cmd": "ReceivedItems",
|
||||
"index": 0,
|
||||
"items": solo_release_items,
|
||||
})
|
||||
solo_release.append({
|
||||
"cmd": "RoomUpdate",
|
||||
"hint_points": 200,
|
||||
"checked_locations": solo_release_locations,
|
||||
})
|
||||
return [encode(solo_release).encode("utf-8")]
|
||||
|
||||
|
||||
def generate_gameplay_corpus() -> list[bytes]:
|
||||
# compared to default 12, 5:
|
||||
# 11, 4 saves 16K RAM, gives +13.6% size, +4.1% time
|
||||
# 10, 4 saves 20K RAM, gives +22.3% size, +2.2% time
|
||||
# 10, 3 saves 24K RAM, gives +26.2% size, +1.6% time
|
||||
# NOTE: time delta is highly unstable; time is 4ms
|
||||
|
||||
from copy import copy
|
||||
from random import Random
|
||||
from MultiServer import json_format_send_event
|
||||
from NetUtils import encode, NetworkItem
|
||||
|
||||
r = Random()
|
||||
r.seed(0)
|
||||
gameplay = []
|
||||
observer = 1
|
||||
hint_points = 0
|
||||
index = 0
|
||||
players = list(range(1, 10))
|
||||
player_locations = {player: [r.randint(1000, 1999) for _ in range(200)] for player in players}
|
||||
player_items = {player: [r.randint(1000, 1999) for _ in range(200)] for player in players}
|
||||
player_receiver = {player: [r.randint(1, len(players)) for _ in range(200)] for player in players}
|
||||
for i in range(0, len(player_locations[1])):
|
||||
player_sequence = copy(players)
|
||||
r.shuffle(player_sequence)
|
||||
for finder in player_sequence:
|
||||
flags = r.choice((0, 0, 0, 0, 0, 0, 0, 1, 2, 3))
|
||||
receiver = player_receiver[finder][i]
|
||||
item = player_items[finder][i]
|
||||
location = player_locations[finder][i]
|
||||
network_item = NetworkItem(item, location, receiver, flags)
|
||||
gameplay.append(json_format_send_event(network_item, observer))
|
||||
if finder == observer:
|
||||
hint_points += 1
|
||||
gameplay.append({
|
||||
"cmd": "RoomUpdate",
|
||||
"hint_points": hint_points,
|
||||
"checked_locations": [location],
|
||||
})
|
||||
if receiver == observer:
|
||||
gameplay.append({
|
||||
"cmd": "ReceivedItems",
|
||||
"index": index,
|
||||
"items": [item],
|
||||
})
|
||||
index += 1
|
||||
return [encode(gameplay).encode("utf-8")]
|
||||
|
||||
|
||||
def main() -> None:
|
||||
#corpus = generate_data_package_corpus()
|
||||
#corpus = generate_solo_release_corpus()
|
||||
#corpus = generate_gameplay_corpus()
|
||||
corpus = generate_data_package_corpus() + generate_solo_release_corpus() + generate_gameplay_corpus()
|
||||
benchmark(corpus)
|
||||
print(f"raw size: {sum(len(data) for data in corpus)}")
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
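The inner loop of benchmark() mirrors websockets' per-message deflate: one shared raw-deflate compressor, a Z_SYNC_FLUSH per message, and the trailing empty block stripped. Pulled out on its own as a sketch of the step being timed (not new functionality):

import zlib

def encode_messages(messages: list[bytes], wbits: int = 12, mem_level: int = 5) -> list[bytes]:
    encoder = zlib.compressobj(wbits=-wbits, memLevel=mem_level)  # negative wbits = raw deflate
    frames = []
    for item in messages:
        frame = encoder.compress(item) + encoder.flush(zlib.Z_SYNC_FLUSH)
        if frame.endswith(b"\x00\x00\xff\xff"):
            frame = frame[:-4]  # drop the empty block appended by Z_SYNC_FLUSH
        frames.append(frame)
    return frames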
@@ -1,4 +1,12 @@
|
||||
def run_locations_benchmark():
|
||||
def run_locations_benchmark(freeze_gc: bool = True) -> None:
|
||||
"""
|
||||
Run a benchmark of location access rule performance against an empty_state and an all_state.
|
||||
|
||||
:param freeze_gc: Whether to freeze gc before benchmarking and unfreeze gc afterward. Freezing gc moves all objects
|
||||
tracked by the garbage collector to a permanent generation, ignoring them in all future collections. Freezing
|
||||
greatly reduces the duration of running gc.collect() within benchmarks, which otherwise often takes much longer
|
||||
than running all iterations for the location rule being benchmarked.
|
||||
"""
|
||||
import argparse
|
||||
import logging
|
||||
import gc
|
||||
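The freeze_gc docstring above describes the whole trick: objects moved to the permanent generation are skipped by later collections, so the gc.collect() attributed to each timed rule stays cheap. A minimal sketch of that pattern in isolation (hypothetical helper, independent of the benchmark classes):

import gc
import time

def time_with_frozen_gc(fn, iterations: int, freeze_gc: bool = True) -> float:
    if freeze_gc:
        gc.freeze()  # move currently tracked objects to the permanent generation
    start = time.perf_counter()
    for _ in range(iterations):
        fn()
    gc.collect()  # cleanup of ref chains created by fn is charged to the measurement
    elapsed = time.perf_counter() - start
    if freeze_gc:
        gc.unfreeze()
    return elapsed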
@@ -34,6 +42,8 @@ def run_locations_benchmark():
|
||||
return "\n".join(f" {time:.4f} in {name}" for name, time in counter.most_common(top))
|
||||
|
||||
def location_test(self, test_location: Location, state: CollectionState, state_name: str) -> float:
|
||||
if freeze_gc:
|
||||
gc.freeze()
|
||||
with TimeIt(f"{test_location.game} {self.rule_iterations} "
|
||||
f"runs of {test_location}.access_rule({state_name})", logger) as t:
|
||||
for _ in range(self.rule_iterations):
|
||||
@@ -41,6 +51,8 @@ def run_locations_benchmark():
|
||||
# if time is taken to disentangle complex ref chains,
|
||||
# this time should be attributed to the rule.
|
||||
gc.collect()
|
||||
if freeze_gc:
|
||||
gc.unfreeze()
|
||||
return t.dif
|
||||
|
||||
def main(self):
|
||||
@@ -64,9 +76,13 @@ def run_locations_benchmark():
|
||||
|
||||
gc.collect()
|
||||
for step in self.gen_steps:
|
||||
if freeze_gc:
|
||||
gc.freeze()
|
||||
with TimeIt(f"{game} step {step}", logger):
|
||||
call_all(multiworld, step)
|
||||
gc.collect()
|
||||
if freeze_gc:
|
||||
gc.unfreeze()
|
||||
|
||||
locations = sorted(multiworld.get_unfilled_locations())
|
||||
if not locations:
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
from argparse import Namespace
|
||||
from typing import List, Optional, Tuple, Type, Union
|
||||
from typing import Any, List, Optional, Tuple, Type
|
||||
|
||||
from BaseClasses import CollectionState, Item, ItemClassification, Location, MultiWorld, Region
|
||||
from worlds import network_data_package
|
||||
from worlds.AutoWorld import World, call_all
|
||||
from worlds.AutoWorld import World, WebWorld, call_all
|
||||
|
||||
gen_steps = (
|
||||
"generate_early",
|
||||
@@ -17,7 +17,7 @@ gen_steps = (
|
||||
|
||||
|
||||
def setup_solo_multiworld(
|
||||
world_type: Type[World], steps: Tuple[str, ...] = gen_steps, seed: Optional[int] = None
|
||||
world_type: Type[World], steps: Tuple[str, ...] = gen_steps, seed: Optional[int] = None
|
||||
) -> MultiWorld:
|
||||
"""
|
||||
Creates a multiworld with a single player of `world_type`, sets default options, and calls provided gen steps.
|
||||
@@ -31,8 +31,8 @@ def setup_solo_multiworld(
|
||||
return setup_multiworld(world_type, steps, seed)
|
||||
|
||||
|
||||
def setup_multiworld(worlds: Union[List[Type[World]], Type[World]], steps: Tuple[str, ...] = gen_steps,
|
||||
seed: Optional[int] = None) -> MultiWorld:
|
||||
def setup_multiworld(worlds: list[type[World]] | type[World], steps: tuple[str, ...] = gen_steps,
|
||||
seed: int | None = None, options: dict[str, Any] | list[dict[str, Any]] = None) -> MultiWorld:
|
||||
"""
|
||||
Creates a multiworld with a player for each provided world type, allowing duplicates, setting default options, and
|
||||
calling the provided gen steps.
|
||||
@@ -40,20 +40,27 @@ def setup_multiworld(worlds: Union[List[Type[World]], Type[World]], steps: Tuple
|
||||
:param worlds: Type/s of worlds to generate a multiworld for
|
||||
:param steps: Gen steps that should be called before returning. Default calls through pre_fill
|
||||
:param seed: The seed to be used when creating this multiworld
|
||||
:param options: Options to set on each world. If just one dict of options is passed, it will be used for all worlds.
|
||||
:return: The generated multiworld
|
||||
"""
|
||||
if not isinstance(worlds, list):
|
||||
worlds = [worlds]
|
||||
|
||||
if options is None:
|
||||
options = [{}] * len(worlds)
|
||||
elif not isinstance(options, list):
|
||||
options = [options] * len(worlds)
|
||||
|
||||
players = len(worlds)
|
||||
multiworld = MultiWorld(players)
|
||||
multiworld.game = {player: world_type.game for player, world_type in enumerate(worlds, 1)}
|
||||
multiworld.player_name = {player: f"Tester{player}" for player in multiworld.player_ids}
|
||||
multiworld.set_seed(seed)
|
||||
args = Namespace()
|
||||
for player, world_type in enumerate(worlds, 1):
|
||||
for player, (world_type, option_overrides) in enumerate(zip(worlds, options), 1):
|
||||
for key, option in world_type.options_dataclass.type_hints.items():
|
||||
updated_options = getattr(args, key, {})
|
||||
updated_options[player] = option.from_any(option.default)
|
||||
updated_options[player] = option.from_any(option_overrides.get(key, option.default))
|
||||
setattr(args, key, updated_options)
|
||||
multiworld.set_options(args)
|
||||
multiworld.state = CollectionState(multiworld)
|
||||
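With the new `options` parameter, overrides can be given per slot (a list with one dict per world) or once for all slots (a single dict); keys that do not name an option are simply ignored, since only `option_overrides.get(key, option.default)` is consulted. Hedged usage sketch, using the TestWorld defined later in this file and a placeholder option name:

# "some_option" is a placeholder, not a real option of any world.
multiworld = setup_multiworld([TestWorld, TestWorld],
                              options=[{"some_option": 1}, {}])   # per-slot overrides
multiworld = setup_multiworld([TestWorld, TestWorld],
                              options={"some_option": 1})         # same overrides for every slot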
@@ -62,11 +69,16 @@ def setup_multiworld(worlds: Union[List[Type[World]], Type[World]], steps: Tuple
|
||||
return multiworld
|
||||
|
||||
|
||||
class TestWebWorld(WebWorld):
|
||||
tutorials = []
|
||||
|
||||
|
||||
class TestWorld(World):
|
||||
game = f"Test Game"
|
||||
item_name_to_id = {}
|
||||
location_name_to_id = {}
|
||||
hidden = True
|
||||
web = TestWebWorld()
|
||||
|
||||
|
||||
# add our test world to the data package, so we can test it later
|
||||
|
||||
@@ -6,9 +6,9 @@ from Utils import get_intended_text, get_input_text_from_response
|
||||
class TestClient(unittest.TestCase):
|
||||
def test_autofill_hint_from_fuzzy_hint(self) -> None:
|
||||
tests = (
|
||||
("item", ["item1", "item2"]), # Multiple close matches
|
||||
("itm", ["item1", "item21"]), # No close match, multiple option
|
||||
("item", ["item1"]), # No close match, single option
|
||||
("item", ["item1", "item2"]), # Multiple close matches
|
||||
("itm", ["item1", "item21"]), # No close match, multiple option
|
||||
("item", ["item1"]), # No close match, single option
|
||||
("item", ["\"item\" 'item' (item)"]), # Testing different special characters
|
||||
)
|
||||
|
||||
@@ -16,7 +16,7 @@ class TestClient(unittest.TestCase):
|
||||
item_name, usable, response = get_intended_text(input_text, possible_answers)
|
||||
self.assertFalse(usable, "This test must be updated, it seems get_fuzzy_results behavior changed")
|
||||
|
||||
hint_command = get_input_text_from_response(response, "hint")
|
||||
hint_command = get_input_text_from_response(response, "!hint")
|
||||
self.assertIsNotNone(hint_command,
|
||||
"The response to fuzzy hints is no longer recognized by the hint autofill")
|
||||
self.assertEqual(hint_command, f"!hint {item_name}",
|
||||
|
||||
@@ -37,10 +37,11 @@ class TestImplemented(unittest.TestCase):
|
||||
|
||||
def test_slot_data(self):
|
||||
"""Tests that if a world creates slot data, it's json serializable."""
|
||||
for game_name, world_type in AutoWorldRegister.world_types.items():
|
||||
# has an await for generate_output which isn't being called
|
||||
if game_name in {"Ocarina of Time"}:
|
||||
continue
|
||||
# has an await for generate_output which isn't being called
|
||||
excluded_games = ("Ocarina of Time",)
|
||||
worlds_to_test = {game: world
|
||||
for game, world in AutoWorldRegister.world_types.items() if game not in excluded_games}
|
||||
for game_name, world_type in worlds_to_test.items():
|
||||
multiworld = setup_solo_multiworld(world_type)
|
||||
with self.subTest(game=game_name, seed=multiworld.seed):
|
||||
distribute_items_restrictive(multiworld)
|
||||
|
||||
@@ -150,8 +150,7 @@ class TestBase(unittest.TestCase):
|
||||
"""Test that worlds don't modify the locality of items after duplicates are resolved"""
|
||||
gen_steps = ("generate_early",)
|
||||
additional_steps = ("create_regions", "create_items", "set_rules", "connect_entrances", "generate_basic", "pre_fill")
|
||||
worlds_to_test = {game: world for game, world in AutoWorldRegister.world_types.items()}
|
||||
for game_name, world_type in worlds_to_test.items():
|
||||
for game_name, world_type in AutoWorldRegister.world_types.items():
|
||||
with self.subTest("Game", game=game_name):
|
||||
multiworld = setup_solo_multiworld(world_type, gen_steps)
|
||||
local_items = multiworld.worlds[1].options.local_items.value.copy()
|
||||
|
||||
@@ -33,7 +33,10 @@ class TestBase(unittest.TestCase):
|
||||
def test_location_creation_steps(self):
|
||||
"""Tests that Regions and Locations aren't created after `create_items`."""
|
||||
gen_steps = ("generate_early", "create_regions", "create_items")
|
||||
for game_name, world_type in AutoWorldRegister.world_types.items():
|
||||
excluded_games = ("Ocarina of Time", "Pokemon Red and Blue")
|
||||
worlds_to_test = {game: world
|
||||
for game, world in AutoWorldRegister.world_types.items() if game not in excluded_games}
|
||||
for game_name, world_type in worlds_to_test.items():
|
||||
with self.subTest("Game", game_name=game_name):
|
||||
multiworld = setup_solo_multiworld(world_type, gen_steps)
|
||||
region_count = len(multiworld.get_regions())
|
||||
@@ -54,13 +57,13 @@ class TestBase(unittest.TestCase):
|
||||
call_all(multiworld, "generate_basic")
|
||||
self.assertEqual(region_count, len(multiworld.get_regions()),
|
||||
f"{game_name} modified region count during generate_basic")
|
||||
self.assertGreaterEqual(location_count, len(multiworld.get_locations()),
|
||||
self.assertEqual(location_count, len(multiworld.get_locations()),
|
||||
f"{game_name} modified locations count during generate_basic")
|
||||
|
||||
call_all(multiworld, "pre_fill")
|
||||
self.assertEqual(region_count, len(multiworld.get_regions()),
|
||||
f"{game_name} modified region count during pre_fill")
|
||||
self.assertGreaterEqual(location_count, len(multiworld.get_locations()),
|
||||
self.assertEqual(location_count, len(multiworld.get_locations()),
|
||||
f"{game_name} modified locations count during pre_fill")
|
||||
|
||||
def test_location_group(self):
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import unittest
|
||||
|
||||
from BaseClasses import PlandoOptions
|
||||
from Options import ItemLinks, Choice
|
||||
from Options import Choice, ItemLinks, PlandoConnections, PlandoItems, PlandoTexts
|
||||
from Utils import restricted_dumps
|
||||
from worlds.AutoWorld import AutoWorldRegister
|
||||
|
||||
@@ -72,8 +72,8 @@ class TestOptions(unittest.TestCase):
|
||||
for link in item_links.values():
|
||||
self.assertEqual(link.value[0], item_link_group[0])
|
||||
|
||||
def test_pickle_dumps(self):
|
||||
"""Test options can be pickled into database for WebHost generation"""
|
||||
def test_pickle_dumps_default(self):
|
||||
"""Test that default option values can be pickled into database for WebHost generation"""
|
||||
for gamename, world_type in AutoWorldRegister.world_types.items():
|
||||
if not world_type.hidden:
|
||||
for option_key, option in world_type.options_dataclass.type_hints.items():
|
||||
@@ -81,3 +81,23 @@ class TestOptions(unittest.TestCase):
|
||||
restricted_dumps(option.from_any(option.default))
|
||||
if issubclass(option, Choice) and option.default in option.name_lookup:
|
||||
restricted_dumps(option.from_text(option.name_lookup[option.default]))
|
||||
|
||||
def test_pickle_dumps_plando(self):
|
||||
"""Test that plando options using containers of a custom type can be pickled"""
|
||||
# The base PlandoConnections class can't be instantiated directly, create a subclass and then cast it
|
||||
class TestPlandoConnections(PlandoConnections):
|
||||
entrances = {"An Entrance"}
|
||||
exits = {"An Exit"}
|
||||
plando_connection_value = PlandoConnections(
|
||||
TestPlandoConnections.from_any([{"entrance": "An Entrance", "exit": "An Exit"}])
|
||||
)
|
||||
|
||||
plando_values = {
|
||||
"PlandoConnections": plando_connection_value,
|
||||
"PlandoItems": PlandoItems.from_any([{"item": "Something", "location": "Somewhere"}]),
|
||||
"PlandoTexts": PlandoTexts.from_any([{"text": "Some text.", "at": "text_box"}]),
|
||||
}
|
||||
|
||||
for option_key, value in plando_values.items():
|
||||
with self.subTest(option=option_key):
|
||||
restricted_dumps(value)
|
||||
|
||||
102
test/general/test_world_manifest.py
Normal file
@@ -0,0 +1,102 @@
|
||||
"""Check world sources' manifest files"""
|
||||
|
||||
import json
|
||||
import unittest
|
||||
from pathlib import Path
|
||||
from typing import Any, ClassVar
|
||||
|
||||
import test
|
||||
from Utils import home_path, local_path
|
||||
from worlds.AutoWorld import AutoWorldRegister
|
||||
from ..param import classvar_matrix
|
||||
|
||||
|
||||
test_path = Path(test.__file__).parent
|
||||
worlds_paths = [
|
||||
Path(local_path("worlds")),
|
||||
Path(local_path("custom_worlds")),
|
||||
Path(home_path("worlds")),
|
||||
Path(home_path("custom_worlds")),
|
||||
]
|
||||
|
||||
# Only check source folders for now. Zip validation should probably be in the loader and/or installer.
|
||||
source_world_names = [
|
||||
k
|
||||
for k, v in AutoWorldRegister.world_types.items()
|
||||
if not v.zip_path and not Path(v.__file__).is_relative_to(test_path)
|
||||
]
|
||||
|
||||
|
||||
def get_source_world_manifest_path(game: str) -> Path | None:
|
||||
"""Get path of archipelago.json in the world's root folder from game name."""
|
||||
# TODO: add a feature to AutoWorld that makes this less annoying
|
||||
world_type = AutoWorldRegister.world_types[game]
|
||||
world_type_path = Path(world_type.__file__)
|
||||
for worlds_path in worlds_paths:
|
||||
if world_type_path.is_relative_to(worlds_path):
|
||||
world_root = worlds_path / world_type_path.relative_to(worlds_path).parents[0]
|
||||
manifest_path = world_root / "archipelago.json"
|
||||
return manifest_path if manifest_path.exists() else None
|
||||
assert False, f"{world_type_path} not found in any worlds path"
|
||||
|
||||
|
||||
# TODO: remove the filter once manifests are mandatory.
|
||||
@classvar_matrix(game=filter(get_source_world_manifest_path, source_world_names))
|
||||
class TestWorldManifest(unittest.TestCase):
|
||||
game: ClassVar[str]
|
||||
manifest: ClassVar[dict[str, Any]]
|
||||
|
||||
@classmethod
|
||||
def setUpClass(cls) -> None:
|
||||
world_type = AutoWorldRegister.world_types[cls.game]
|
||||
assert world_type.game == cls.game
|
||||
manifest_path = get_source_world_manifest_path(cls.game)
|
||||
assert manifest_path # make mypy happy
|
||||
with manifest_path.open("r", encoding="utf-8") as f:
|
||||
cls.manifest = json.load(f)
|
||||
|
||||
def test_game(self) -> None:
|
||||
"""Test that 'game' will be correctly defined when generating APWorld manifest from source."""
|
||||
self.assertIn(
|
||||
"game",
|
||||
self.manifest,
|
||||
f"archipelago.json manifest exists for {self.game} but does not contain 'game'",
|
||||
)
|
||||
self.assertEqual(
|
||||
self.manifest["game"],
|
||||
self.game,
|
||||
f"archipelago.json manifest for {self.game} specifies wrong game '{self.manifest['game']}'",
|
||||
)
|
||||
|
||||
def test_world_version(self) -> None:
|
||||
"""Test that world_version matches the requirements in apworld specification.md"""
|
||||
if "world_version" in self.manifest:
|
||||
world_version: str = self.manifest["world_version"]
|
||||
self.assertIsInstance(
|
||||
world_version,
|
||||
str,
|
||||
f"world_version in archipelago.json for '{self.game}' has to be string if provided.",
|
||||
)
|
||||
parts = world_version.split(".")
|
||||
self.assertEqual(
|
||||
len(parts),
|
||||
3,
|
||||
f"world_version in archipelago.json for '{self.game}' has to be in the form of 'major.minor.build'.",
|
||||
)
|
||||
for part in parts:
|
||||
self.assertTrue(
|
||||
part.isdigit(),
|
||||
f"world_version in archipelago.json for '{self.game}' may only contain numbers.",
|
||||
)
|
||||
|
||||
def test_no_container_version(self) -> None:
|
||||
self.assertNotIn(
|
||||
"version",
|
||||
self.manifest,
|
||||
f"archipelago.json for '{self.game}' must not define 'version', see apworld specification.md.",
|
||||
)
|
||||
self.assertNotIn(
|
||||
"compatible_version",
|
||||
self.manifest,
|
||||
f"archipelago.json for '{self.game}' must not define 'compatible_version', see apworld specification.md.",
|
||||
)
|
||||
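Taken together, the assertions above pin down what a valid source manifest looks like. A hypothetical example that would pass all three tests (not a manifest shipped by any real world):

example_manifest = {
    "game": "Some Game",        # must equal the World class's `game`
    "world_version": "1.2.3",   # optional; a string of the form "major.minor.build", digits only
    # "version" and "compatible_version" must not appear; they are container-level fields.
}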
@@ -3,6 +3,7 @@
|
||||
# Run with `python test/hosting` instead,
|
||||
import logging
|
||||
import traceback
|
||||
from pathlib import Path
|
||||
from tempfile import TemporaryDirectory
|
||||
from time import sleep
|
||||
from typing import Any
|
||||
@@ -11,7 +12,7 @@ from test.hosting.client import Client
|
||||
from test.hosting.generate import generate_local
|
||||
from test.hosting.serve import ServeGame, LocalServeGame, WebHostServeGame
|
||||
from test.hosting.webhost import (create_room, get_app, get_multidata_for_room, set_multidata_for_room, start_room,
|
||||
stop_autohost, upload_multidata)
|
||||
stop_autogen, stop_autohost, upload_multidata, generate_remote)
|
||||
from test.hosting.world import copy as copy_world, delete as delete_world
|
||||
|
||||
failure = False
|
||||
@@ -56,35 +57,62 @@ else:
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
warnings.simplefilter("ignore", ResourceWarning)
|
||||
warnings.simplefilter("ignore", UserWarning)
|
||||
warnings.simplefilter("ignore", DeprecationWarning)
|
||||
|
||||
spacer = '=' * 80
|
||||
|
||||
with TemporaryDirectory() as tempdir:
|
||||
empty_file = str(Path(tempdir) / "empty")
|
||||
open(empty_file, "w").close()
|
||||
sys.argv += ["--config_override", empty_file] # tests #5541
|
||||
multis = [["VVVVVV"], ["Temp World"], ["VVVVVV", "Temp World"]]
|
||||
p1_games = []
|
||||
data_paths = []
|
||||
rooms = []
|
||||
p1_games: list[str] = []
|
||||
data_paths: list[Path | None] = []
|
||||
rooms: list[str] = []
|
||||
multidata: Path | None
|
||||
|
||||
copy_world("VVVVVV", "Temp World")
|
||||
try:
|
||||
for n, games in enumerate(multis, 1):
|
||||
print(f"Generating [{n}] {', '.join(games)}")
|
||||
print(f"Generating [{n}] {', '.join(games)} offline")
|
||||
multidata = generate_local(games, tempdir)
|
||||
print(f"Generated [{n}] {', '.join(games)} as {multidata}\n")
|
||||
p1_games.append(games[0])
|
||||
data_paths.append(multidata)
|
||||
p1_games.append(games[0])
|
||||
finally:
|
||||
delete_world("Temp World")
|
||||
|
||||
webapp = get_app(tempdir)
|
||||
webhost_client = webapp.test_client()
|
||||
|
||||
for n, multidata in enumerate(data_paths, 1):
|
||||
assert multidata
|
||||
seed = upload_multidata(webhost_client, multidata)
|
||||
print(f"Uploaded [{n}] {multidata} as {seed}\n")
|
||||
room = create_room(webhost_client, seed)
|
||||
print(f"Uploaded [{n}] {multidata} as {room}\n")
|
||||
print(f"Started [{n}] {seed} as {room}\n")
|
||||
rooms.append(room)
|
||||
|
||||
# Generate 1 extra game on WebHost
|
||||
from WebHostLib.autolauncher import autogen
|
||||
for n, games in enumerate(multis[:1], len(multis) + 1):
|
||||
multis.append(games)
|
||||
try:
|
||||
print(f"Generating [{n}] {', '.join(games)} online")
|
||||
autogen(webapp.config)
|
||||
sleep(5) # until we have lazy loading of worlds, wait here for the process to start up
|
||||
seed = generate_remote(webhost_client, games)
|
||||
print(f"Generated [{n}] {', '.join(games)} as {seed}\n")
|
||||
finally:
|
||||
stop_autogen()
|
||||
data_paths.append(None) # WebHost-only
|
||||
room = create_room(webhost_client, seed)
|
||||
print(f"Started [{n}] {seed} as {room}\n")
|
||||
rooms.append(room)
|
||||
|
||||
print("Starting autohost")
|
||||
@@ -96,31 +124,10 @@ if __name__ == "__main__":
|
||||
for n, (multidata, room, game, multi_games) in enumerate(zip(data_paths, rooms, p1_games, multis), 1):
|
||||
involved_games = {"Archipelago"} | set(multi_games)
|
||||
for collected_items in range(3):
|
||||
print(f"\nTesting [{n}] {game} in {multidata} on MultiServer with {collected_items} items collected")
|
||||
with LocalServeGame(multidata) as host:
|
||||
with Client(host.address, game, "Player1") as client:
|
||||
local_data_packages = client.games_packages
|
||||
local_collected_items = len(client.checked_locations)
|
||||
if collected_items < 2: # Don't collect anything on the last iteration
|
||||
client.collect_any()
|
||||
# TODO: Ctrl+C test here as well
|
||||
|
||||
for game_name in sorted(involved_games):
|
||||
expect_true(game_name in local_data_packages,
|
||||
f"{game_name} missing from MultiServer datap ackage")
|
||||
expect_true("item_name_groups" not in local_data_packages.get(game_name, {}),
|
||||
f"item_name_groups are not supposed to be in MultiServer data for {game_name}")
|
||||
expect_true("location_name_groups" not in local_data_packages.get(game_name, {}),
|
||||
f"location_name_groups are not supposed to be in MultiServer data for {game_name}")
|
||||
for game_name in local_data_packages:
|
||||
expect_true(game_name in involved_games,
|
||||
f"Received unexpected extra data package for {game_name} from MultiServer")
|
||||
assert_equal(local_collected_items, collected_items,
|
||||
"MultiServer did not load or save correctly")
|
||||
|
||||
print(f"\nTesting [{n}] {game} in {multidata} on customserver with {collected_items} items collected")
|
||||
prev_host_adr: str
|
||||
with WebHostServeGame(webhost_client, room) as host:
|
||||
sleep(.1) # wait for the server to fully start before doing anything
|
||||
prev_host_adr = host.address
|
||||
with Client(host.address, game, "Player1") as client:
|
||||
web_data_packages = client.games_packages
|
||||
@@ -134,6 +141,7 @@ if __name__ == "__main__":
|
||||
autohost(webapp.config) # this will spin the room right up again
|
||||
sleep(1) # make log less annoying
|
||||
# if saving failed, the next iteration will fail below
|
||||
sleep(2) # work around issue #5571
|
||||
|
||||
# verify server shut down
|
||||
try:
|
||||
@@ -156,6 +164,31 @@ if __name__ == "__main__":
|
||||
"customserver did not load or save correctly during/after "
|
||||
+ ("Ctrl+C" if collected_items == 2 else "/exit"))
|
||||
|
||||
if not multidata:
|
||||
continue # games rolled on WebHost can not be tested against MultiServer
|
||||
|
||||
print(f"\nTesting [{n}] {game} in {multidata} on MultiServer with {collected_items} items collected")
|
||||
with LocalServeGame(multidata) as host:
|
||||
with Client(host.address, game, "Player1") as client:
|
||||
local_data_packages = client.games_packages
|
||||
local_collected_items = len(client.checked_locations)
|
||||
if collected_items < 2: # Don't collect anything on the last iteration
|
||||
client.collect_any()
|
||||
# TODO: Ctrl+C test here as well
|
||||
|
||||
for game_name in sorted(involved_games):
|
||||
expect_true(game_name in local_data_packages,
|
||||
f"{game_name} missing from MultiServer datapackage")
|
||||
expect_true("item_name_groups" not in local_data_packages.get(game_name, {}),
|
||||
f"item_name_groups are not supposed to be in MultiServer data for {game_name}")
|
||||
expect_true("location_name_groups" not in local_data_packages.get(game_name, {}),
|
||||
f"location_name_groups are not supposed to be in MultiServer data for {game_name}")
|
||||
for game_name in local_data_packages:
|
||||
expect_true(game_name in involved_games,
|
||||
f"Received unexpected extra data package for {game_name} from MultiServer")
|
||||
assert_equal(local_collected_items, collected_items,
|
||||
"MultiServer did not load or save correctly")
|
||||
|
||||
# compare customserver to MultiServer
|
||||
expect_equal(local_data_packages, web_data_packages,
|
||||
"customserver datapackage differs from MultiServer")
|
||||
@@ -176,10 +209,12 @@ if __name__ == "__main__":
|
||||
print(f"Restoring multidata for {room}")
|
||||
set_multidata_for_room(webhost_client, room, old_data)
|
||||
with WebHostServeGame(webhost_client, room) as host:
|
||||
sleep(.1) # wait for the server to fully start before doing anything
|
||||
with Client(host.address, game, "Player1") as client:
|
||||
assert_equal(len(client.checked_locations), 2,
|
||||
"Save was destroyed during exception in customserver")
|
||||
print("Save file is not busted 🥳")
|
||||
sleep(2) # work around issue #5571
|
||||
|
||||
finally:
|
||||
print("Stopping autohost")
|
||||
|
||||
@@ -1,6 +1,10 @@
|
||||
import io
|
||||
import json
|
||||
import re
|
||||
import time
|
||||
import zipfile
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING, Optional, cast
|
||||
from typing import TYPE_CHECKING, Iterable, Optional, cast
|
||||
|
||||
from WebHostLib import to_python
|
||||
|
||||
@@ -10,6 +14,7 @@ if TYPE_CHECKING:
|
||||
|
||||
__all__ = [
|
||||
"get_app",
|
||||
"generate_remote",
|
||||
"upload_multidata",
|
||||
"create_room",
|
||||
"start_room",
|
||||
@@ -17,6 +22,7 @@ __all__ = [
|
||||
"set_room_timeout",
|
||||
"get_multidata_for_room",
|
||||
"set_multidata_for_room",
|
||||
"stop_autogen",
|
||||
"stop_autohost",
|
||||
]
|
||||
|
||||
@@ -33,10 +39,43 @@ def get_app(tempdir: str) -> "Flask":
|
||||
"TESTING": True,
|
||||
"HOST_ADDRESS": "localhost",
|
||||
"HOSTERS": 1,
|
||||
"GENERATORS": 1,
|
||||
"JOB_THRESHOLD": 1,
|
||||
})
|
||||
return get_app()
|
||||
|
||||
|
||||
def generate_remote(app_client: "FlaskClient", games: Iterable[str]) -> str:
|
||||
data = io.BytesIO()
|
||||
with zipfile.ZipFile(data, "a", zipfile.ZIP_DEFLATED, False) as zip_file:
|
||||
for n, game in enumerate(games, 1):
|
||||
name = f"{n}.yaml"
|
||||
zip_file.writestr(name, json.dumps({
|
||||
"name": f"Player{n}",
|
||||
"game": game,
|
||||
game: {},
|
||||
"description": f"generate_remote slot {n} ('Player{n}'): {game}",
|
||||
}))
|
||||
data.seek(0)
|
||||
response = app_client.post("/generate", content_type="multipart/form-data", data={
|
||||
"file": (data, "yamls.zip"),
|
||||
})
|
||||
assert response.status_code < 400, f"Starting gen failed: status {response.status_code}"
|
||||
assert "Location" in response.headers, f"Starting gen failed: no redirect"
|
||||
location = response.headers["Location"]
|
||||
assert isinstance(location, str)
|
||||
assert location.startswith("/wait/"), f"Starting WebHost gen failed: unexpected redirect to {location}"
|
||||
for attempt in range(10):
|
||||
response = app_client.get(location)
|
||||
if "Location" in response.headers:
|
||||
location = response.headers["Location"]
|
||||
assert isinstance(location, str)
|
||||
assert location.startswith("/seed/"), f"Finishing WebHost gen failed: unexpected redirect to {location}"
|
||||
return location[6:]
|
||||
time.sleep(1)
|
||||
raise TimeoutError("WebHost gen did not finish")
|
||||
|
||||
|
||||
def upload_multidata(app_client: "FlaskClient", multidata: Path) -> str:
|
||||
response = app_client.post("/uploads", data={
|
||||
"file": multidata.open("rb"),
|
||||
@@ -188,7 +227,7 @@ def set_multidata_for_room(webhost_client: "FlaskClient", room_id: str, data: by
|
||||
room.seed.multidata = data
|
||||
|
||||
|
||||
def stop_autohost(graceful: bool = True) -> None:
|
||||
def _stop_webhost_mp(name_filter: str, graceful: bool = True) -> None:
|
||||
import os
|
||||
import signal
|
||||
|
||||
@@ -198,13 +237,30 @@ def stop_autohost(graceful: bool = True) -> None:
|
||||
|
||||
stop()
|
||||
proc: multiprocessing.process.BaseProcess
|
||||
for proc in filter(lambda child: child.name.startswith("MultiHoster"), multiprocessing.active_children()):
|
||||
for proc in filter(lambda child: child.name.startswith(name_filter), multiprocessing.active_children()):
|
||||
# FIXME: graceful currently does not work on Windows because the signals are not properly emulated
|
||||
# and ungraceful may not save the game
|
||||
if proc.pid == os.getpid():
|
||||
continue
|
||||
if graceful and proc.pid:
|
||||
os.kill(proc.pid, getattr(signal, "CTRL_C_EVENT", signal.SIGINT))
|
||||
else:
|
||||
proc.kill()
|
||||
try:
|
||||
proc.join(30)
|
||||
try:
|
||||
proc.join(30)
|
||||
except TimeoutError:
|
||||
raise
|
||||
except KeyboardInterrupt:
|
||||
# on Windows, the MP exception may be forwarded to the host, so ignore once and retry
|
||||
proc.join(30)
|
||||
except TimeoutError:
|
||||
proc.kill()
|
||||
proc.join()
|
||||
|
||||
def stop_autogen(graceful: bool = True) -> None:
|
||||
# FIXME: this name filter is jank, but there seems to be no way to add a custom prefix for a Pool
|
||||
_stop_webhost_mp("SpawnPoolWorker-", graceful)
|
||||
|
||||
def stop_autohost(graceful: bool = True) -> None:
|
||||
_stop_webhost_mp("MultiHoster", graceful)
|
||||
|
||||
@@ -11,7 +11,7 @@ _new_worlds: dict[str, str] = {}
|
||||
|
||||
def copy(src: str, dst: str) -> None:
|
||||
from Utils import get_file_safe_name
|
||||
from worlds import AutoWorldRegister
|
||||
from worlds.AutoWorld import AutoWorldRegister
|
||||
|
||||
assert dst not in _new_worlds, "World already created"
|
||||
if '"' in dst or "\\" in dst: # easier to reject than to escape
|
||||
|
||||
14
test/utils/test_daemon_thread_pool.py
Normal file
@@ -0,0 +1,14 @@
|
||||
import unittest
|
||||
|
||||
from Utils import DaemonThreadPoolExecutor
|
||||
|
||||
|
||||
class DaemonThreadPoolExecutorTest(unittest.TestCase):
|
||||
def test_is_daemon(self) -> None:
|
||||
def run() -> None:
|
||||
pass
|
||||
|
||||
with DaemonThreadPoolExecutor(1) as executor:
|
||||
executor.submit(run)
|
||||
|
||||
self.assertTrue(next(iter(executor._threads)).daemon)
|
||||
78
test/webhost/test_markdown.py
Normal file
@@ -0,0 +1,78 @@
|
||||
import os
|
||||
import unittest
|
||||
from tempfile import NamedTemporaryFile
|
||||
|
||||
from mistune import HTMLRenderer, Markdown
|
||||
|
||||
from WebHostLib.markdown import ImgUrlRewriteInlineParser, render_markdown
|
||||
|
||||
|
||||
class ImgUrlRewriteTest(unittest.TestCase):
|
||||
markdown: Markdown
|
||||
base_url = "/static/generated/docs/some_game"
|
||||
|
||||
def setUp(self) -> None:
|
||||
self.markdown = Markdown(
|
||||
renderer=HTMLRenderer(escape=False),
|
||||
inline=ImgUrlRewriteInlineParser(self.base_url),
|
||||
)
|
||||
|
||||
def test_relative_img_rewrite(self) -> None:
|
||||
html = self.markdown("")
|
||||
self.assertIn(f'src="{self.base_url}/image.png"', html)
|
||||
|
||||
def test_absolute_img_no_rewrite(self) -> None:
|
||||
html = self.markdown("")
|
||||
self.assertIn(f'src="/image.png"', html)
|
||||
self.assertNotIn(self.base_url, html)
|
||||
|
||||
def test_remote_img_no_rewrite(self) -> None:
|
||||
html = self.markdown("")
|
||||
self.assertIn(f'src="https://example.com/image.png"', html)
|
||||
self.assertNotIn(self.base_url, html)
|
||||
|
||||
def test_relative_link_no_rewrite(self) -> None:
|
||||
# The parser is only supposed to update images, not links.
|
||||
html = self.markdown("[Link](image.png)")
|
||||
self.assertIn(f'href="image.png"', html)
|
||||
self.assertNotIn(self.base_url, html)
|
||||
|
||||
def test_absolute_link_no_rewrite(self) -> None:
|
||||
html = self.markdown("[Link](/image.png)")
|
||||
self.assertIn(f'href="/image.png"', html)
|
||||
self.assertNotIn(self.base_url, html)
|
||||
|
||||
def test_auto_link_no_rewrite(self) -> None:
|
||||
html = self.markdown("<https://example.com/image.png>")
|
||||
self.assertIn(f'href="https://example.com/image.png"', html)
|
||||
self.assertNotIn(self.base_url, html)
|
||||
|
||||
def test_relative_img_to_other_game(self) -> None:
|
||||
html = self.markdown("")
|
||||
self.assertIn(f'src="{self.base_url}/../Archipelago/image.png"', html)
|
||||
|
||||
|
||||
class RenderMarkdownTest(unittest.TestCase):
|
||||
"""Tests that render_markdown does the right thing."""
|
||||
base_url = "/static/generated/docs/some_game"
|
||||
|
||||
def test_relative_img_rewrite(self) -> None:
|
||||
f = NamedTemporaryFile(delete=False)
|
||||
try:
|
||||
f.write("".encode("utf-8"))
|
||||
f.close()
|
||||
html = render_markdown(f.name, self.base_url)
|
||||
self.assertIn(f'src="{self.base_url}/image.png"', html)
|
||||
finally:
|
||||
os.unlink(f.name)
|
||||
|
||||
def test_no_img_rewrite(self) -> None:
|
||||
f = NamedTemporaryFile(delete=False)
|
||||
try:
|
||||
f.write("".encode("utf-8"))
|
||||
f.close()
|
||||
html = render_markdown(f.name)
|
||||
self.assertIn(f'src="image.png"', html)
|
||||
self.assertNotIn(self.base_url, html)
|
||||
finally:
|
||||
os.unlink(f.name)
|
||||
63
test/webhost/test_sitemap.py
Normal file
@@ -0,0 +1,63 @@
|
||||
import urllib.parse
|
||||
import html
|
||||
import re
|
||||
from flask import url_for
|
||||
|
||||
import WebHost
|
||||
from . import TestBase
|
||||
|
||||
|
||||
class TestSitemap(TestBase):
|
||||
|
||||
# Codes for OK and some redirects that we use
|
||||
valid_status_codes = [200, 302, 308]
|
||||
|
||||
@classmethod
|
||||
def setUpClass(cls) -> None:
|
||||
super().setUpClass()
|
||||
WebHost.copy_tutorials_files_to_static()
|
||||
|
||||
def test_sitemap_route(self) -> None:
|
||||
"""Verify that the sitemap route works correctly and renders the template without errors."""
|
||||
with self.app.test_request_context():
|
||||
# Test the /sitemap route
|
||||
with self.client.open("/sitemap") as response:
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertIn(b"Site Map", response.data)
|
||||
|
||||
# Test the /index route which should also serve the sitemap
|
||||
with self.client.open("/index") as response:
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertIn(b"Site Map", response.data)
|
||||
|
||||
# Test using url_for with the function name
|
||||
with self.client.open(url_for('get_sitemap')) as response:
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertIn(b'Site Map', response.data)
|
||||
|
||||
def test_sitemap_links(self) -> None:
|
||||
"""
|
||||
Verify that all links in the sitemap are valid by making a request to each one.
|
||||
"""
|
||||
with self.app.test_request_context():
|
||||
with self.client.open(url_for("get_sitemap")) as response:
|
||||
self.assertEqual(response.status_code, 200)
|
||||
html_content = response.data.decode()
|
||||
|
||||
# Extract all href links using regex
|
||||
href_pattern = re.compile(r'href=["\'](.*?)["\']')
|
||||
links = href_pattern.findall(html_content)
|
||||
|
||||
self.assertTrue(len(links) > 0, "No links found in sitemap")
|
||||
|
||||
# Test each link
|
||||
for link in links:
|
||||
# Skip external links
|
||||
if link.startswith(("http://", "https://")):
|
||||
continue
|
||||
|
||||
link = urllib.parse.unquote(html.unescape(link))
|
||||
|
||||
with self.client.open(link) as response, self.subTest(link=link):
|
||||
self.assertIn(response.status_code, self.valid_status_codes,
|
||||
f"Link {link} returned invalid status code {response.status_code}")
|
||||
@@ -93,3 +93,13 @@ class TestTracker(TestBase):
|
||||
headers={"If-Modified-Since": "Wed, 21 Oct 2015 07:28:00"}, # missing timezone
|
||||
)
|
||||
self.assertEqual(response.status_code, 400)
|
||||
|
||||
def test_tracker_api(self) -> None:
|
||||
"""Verify that tracker api gives a reply for the room."""
|
||||
with self.app.test_request_context():
|
||||
with self.client.open(url_for("api.tracker_data", tracker=self.tracker_uuid)) as response:
|
||||
self.assertEqual(response.status_code, 200)
|
||||
with self.client.open(url_for("api.static_tracker_data", tracker=self.tracker_uuid)) as response:
|
||||
self.assertEqual(response.status_code, 200)
|
||||
with self.client.open(url_for("api.tracker_slot_data", tracker=self.tracker_uuid)) as response:
|
||||
self.assertEqual(response.status_code, 200)
|
||||
|
||||
@@ -1,17 +1,46 @@
|
||||
def load_tests(loader, standard_tests, pattern):
|
||||
from typing import TYPE_CHECKING
|
||||
if TYPE_CHECKING:
|
||||
from unittest import TestLoader, TestSuite
|
||||
|
||||
|
||||
def load_tests(loader: "TestLoader", standard_tests: "TestSuite", pattern: str):
|
||||
import os
|
||||
import unittest
|
||||
import fnmatch
|
||||
from .. import file_path
|
||||
from worlds.AutoWorld import AutoWorldRegister
|
||||
|
||||
suite = unittest.TestSuite()
|
||||
suite.addTests(standard_tests)
|
||||
|
||||
# pattern hack
|
||||
# all tests from within __init__ are always imported, so we need to filter out the folder earlier
|
||||
# if the pattern isn't matching a specific world, we don't have much of a solution
|
||||
|
||||
if pattern.startswith("worlds."):
|
||||
if pattern.endswith(".py"):
|
||||
pattern = pattern[:-3]
|
||||
components = pattern.split(".")
|
||||
world_glob = f"worlds.{components[1]}"
|
||||
pattern = components[-1]
|
||||
|
||||
elif pattern.startswith(f"worlds{os.path.sep}") or pattern.startswith(f"worlds{os.path.altsep}"):
|
||||
components = pattern.split(os.path.sep)
|
||||
if len(components) == 1:
|
||||
components = pattern.split(os.path.altsep)
|
||||
world_glob = f"worlds.{components[1]}"
|
||||
pattern = components[-1]
|
||||
else:
|
||||
world_glob = "*"
|
||||
|
||||
|
||||
folders = [os.path.join(os.path.split(world.__file__)[0], "test")
|
||||
for world in AutoWorldRegister.world_types.values()]
|
||||
for world in AutoWorldRegister.world_types.values()
|
||||
if fnmatch.fnmatch(world.__module__, world_glob)]
|
||||
|
||||
all_tests = [
|
||||
test_case for folder in folders if os.path.exists(folder)
|
||||
for test_collection in loader.discover(folder, top_level_dir=file_path)
|
||||
for test_collection in loader.discover(folder, top_level_dir=file_path, pattern=pattern)
|
||||
for test_suite in test_collection if isinstance(test_suite, unittest.suite.TestSuite)
|
||||
for test_case in test_suite
|
||||
]
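Worked examples of the pattern hack above, assuming a world package named worlds.alttp (the values follow directly from the branches in load_tests):

#   pattern = "worlds.alttp.test_inverted"     -> world_glob = "worlds.alttp", pattern = "test_inverted"
#   pattern = "worlds/alttp/test_inverted.py"  -> world_glob = "worlds.alttp", pattern = "test_inverted.py"
#   pattern = "test*.py" (unittest default)    -> world_glob = "*", every world's test folder is searched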
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.