Merge remote-tracking branch 'origin/dev' into dev
.github/pyright-config.json (vendored, 2 changes)

@@ -29,7 +29,7 @@
   "reportMissingImports": true,
   "reportMissingTypeStubs": true,

-  "pythonVersion": "3.10",
+  "pythonVersion": "3.11",
   "pythonPlatform": "Windows",

   "executionEnvironments": [
.github/workflows/analyze-modified-files.yml (vendored, 2 changes)

@@ -53,7 +53,7 @@ jobs:
       - uses: actions/setup-python@v5
         if: env.diff != ''
         with:
-          python-version: '3.10'
+          python-version: '3.11'

       - name: "Install dependencies"
         if: env.diff != ''
.github/workflows/build.yml (vendored, 15 changes)

@@ -9,22 +9,25 @@ on:
       - 'setup.py'
       - 'requirements.txt'
       - '*.iss'
+      - 'worlds/*/archipelago.json'
   pull_request:
     paths:
       - '.github/workflows/build.yml'
       - 'setup.py'
       - 'requirements.txt'
       - '*.iss'
+      - 'worlds/*/archipelago.json'
   workflow_dispatch:

 env:
   ENEMIZER_VERSION: 7.1
   # NOTE: since appimage/appimagetool and appimage/type2-runtime does not have tags anymore,
   # we check the sha256 and require manual intervention if it was updated.
-  APPIMAGETOOL_VERSION: continuous
-  APPIMAGETOOL_X86_64_HASH: '363dafac070b65cc36ca024b74db1f043c6f5cd7be8fca760e190dce0d18d684'
-  APPIMAGE_RUNTIME_VERSION: continuous
-  APPIMAGE_RUNTIME_X86_64_HASH: 'e3c4dfb70eddf42e7e5a1d28dff396d30563aa9a901970aebe6f01f3fecf9f8e'
+  APPIMAGE_FORK: 'PopTracker'
+  APPIMAGETOOL_VERSION: 'r-2025-10-19'
+  APPIMAGETOOL_X86_64_HASH: '9493a6b253a01f84acb9c624c38810ecfa11d99daa829b952b0bff43113080f9'
+  APPIMAGE_RUNTIME_VERSION: 'r-2025-08-11'
+  APPIMAGE_RUNTIME_X86_64_HASH: 'e70ffa9b69b211574d0917adc482dd66f25a0083427b5945783965d55b0b0a8b'

 permissions: # permissions required for attestation
   id-token: 'write'

@@ -139,9 +142,9 @@ jobs:
       - name: Install build-time dependencies
         run: |
           echo "PYTHON=python3.12" >> $GITHUB_ENV
-          wget -nv https://github.com/AppImage/appimagetool/releases/download/$APPIMAGETOOL_VERSION/appimagetool-x86_64.AppImage
+          wget -nv https://github.com/$APPIMAGE_FORK/appimagetool/releases/download/$APPIMAGETOOL_VERSION/appimagetool-x86_64.AppImage
           echo "$APPIMAGETOOL_X86_64_HASH appimagetool-x86_64.AppImage" | sha256sum -c
-          wget -nv https://github.com/AppImage/type2-runtime/releases/download/$APPIMAGE_RUNTIME_VERSION/runtime-x86_64
+          wget -nv https://github.com/$APPIMAGE_FORK/type2-runtime/releases/download/$APPIMAGE_RUNTIME_VERSION/runtime-x86_64
          echo "$APPIMAGE_RUNTIME_X86_64_HASH runtime-x86_64" | sha256sum -c
          chmod a+rx appimagetool-x86_64.AppImage
          ./appimagetool-x86_64.AppImage --appimage-extract
.github/workflows/docker.yml (vendored, new file, 154 lines)

@@ -0,0 +1,154 @@
+name: Build and Publish Docker Images
+
+on:
+  push:
+    paths:
+      - "**"
+      - "!docs/**"
+      - "!deploy/**"
+      - "!setup.py"
+      - "!.gitignore"
+      - "!.github/workflows/**"
+      - ".github/workflows/docker.yml"
+    branches:
+      - "*"
+    tags:
+      - "v?[0-9]+.[0-9]+.[0-9]*"
+  workflow_dispatch:
+
+env:
+  REGISTRY: ghcr.io
+
+jobs:
+  prepare:
+    runs-on: ubuntu-latest
+    outputs:
+      image-name: ${{ steps.image.outputs.name }}
+      tags: ${{ steps.meta.outputs.tags }}
+      labels: ${{ steps.meta.outputs.labels }}
+      package-name: ${{ steps.package.outputs.name }}
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+
+      - name: Set lowercase image name
+        id: image
+        run: |
+          echo "name=${GITHUB_REPOSITORY,,}" >> $GITHUB_OUTPUT
+
+      - name: Set package name
+        id: package
+        run: |
+          echo "name=$(basename ${GITHUB_REPOSITORY,,})" >> $GITHUB_OUTPUT
+
+      - name: Extract metadata
+        id: meta
+        uses: docker/metadata-action@v5
+        with:
+          images: ${{ env.REGISTRY }}/${{ steps.image.outputs.name }}
+          tags: |
+            type=ref,event=branch,enable={{is_not_default_branch}}
+            type=semver,pattern={{version}}
+            type=semver,pattern={{major}}.{{minor}}
+            type=raw,value=nightly,enable={{is_default_branch}}
+
+      - name: Compute final tags
+        id: final-tags
+        run: |
+          readarray -t tags <<< "${{ steps.meta.outputs.tags }}"
+
+          if [[ "${{ github.ref_type }}" == "tag" ]]; then
+            tag="${{ github.ref_name }}"
+            if [[ "$tag" =~ ^v?[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+              full_latest="${{ env.REGISTRY }}/${{ steps.image.outputs.name }}:latest"
+              # Check if latest is already in tags to avoid duplicates
+              if ! printf '%s\n' "${tags[@]}" | grep -q "^$full_latest$"; then
+                tags+=("$full_latest")
+              fi
+            fi
+          fi
+
+          # Set multiline output
+          echo "tags<<EOF" >> $GITHUB_OUTPUT
+          printf '%s\n' "${tags[@]}" >> $GITHUB_OUTPUT
+          echo "EOF" >> $GITHUB_OUTPUT
+
+  build:
+    needs: prepare
+    runs-on: ${{ matrix.runner }}
+    permissions:
+      contents: read
+      packages: write
+    strategy:
+      matrix:
+        include:
+          - platform: amd64
+            runner: ubuntu-latest
+            suffix: amd64
+            cache-scope: amd64
+          - platform: arm64
+            runner: ubuntu-24.04-arm
+            suffix: arm64
+            cache-scope: arm64
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+
+      - name: Log in to GitHub Container Registry
+        uses: docker/login-action@v3
+        with:
+          registry: ${{ env.REGISTRY }}
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Compute suffixed tags
+        id: tags
+        run: |
+          readarray -t tags <<< "${{ needs.prepare.outputs.tags }}"
+          suffixed=()
+          for t in "${tags[@]}"; do
+            suffixed+=("$t-${{ matrix.suffix }}")
+          done
+          echo "tags=$(IFS=','; echo "${suffixed[*]}")" >> $GITHUB_OUTPUT
+
+      - name: Build and push Docker image
+        uses: docker/build-push-action@v5
+        with:
+          context: .
+          file: ./Dockerfile
+          platforms: linux/${{ matrix.platform }}
+          push: true
+          tags: ${{ steps.tags.outputs.tags }}
+          labels: ${{ needs.prepare.outputs.labels }}
+          cache-from: type=gha,scope=${{ matrix.cache-scope }}
+          cache-to: type=gha,mode=max,scope=${{ matrix.cache-scope }}
+          provenance: false
+
+  manifest:
+    needs: [prepare, build]
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      packages: write
+    steps:
+      - name: Log in to GitHub Container Registry
+        uses: docker/login-action@v3
+        with:
+          registry: ${{ env.REGISTRY }}
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Create and push multi-arch manifest
+        run: |
+          readarray -t tag_array <<< "${{ needs.prepare.outputs.tags }}"
+
+          for tag in "${tag_array[@]}"; do
+            docker manifest create "$tag" \
+              "$tag-amd64" \
+              "$tag-arm64"
+
+            docker manifest push "$tag"
+          done
.github/workflows/label-pull-requests.yml (vendored, 1 change)

@@ -12,7 +12,6 @@ env:
 jobs:
   labeler:
     name: 'Apply content-based labels'
-    if: github.event.action == 'opened' || github.event.action == 'reopened' || github.event.action == 'synchronize'
     runs-on: ubuntu-latest
     steps:
       - uses: actions/labeler@v5
.github/workflows/release.yml (vendored, 15 changes)

@@ -5,16 +5,17 @@ name: Release
 on:
   push:
     tags:
-      - '*.*.*'
+      - 'v?[0-9]+.[0-9]+.[0-9]*'

 env:
   ENEMIZER_VERSION: 7.1
   # NOTE: since appimage/appimagetool and appimage/type2-runtime does not have tags anymore,
   # we check the sha256 and require manual intervention if it was updated.
-  APPIMAGETOOL_VERSION: continuous
-  APPIMAGETOOL_X86_64_HASH: '363dafac070b65cc36ca024b74db1f043c6f5cd7be8fca760e190dce0d18d684'
-  APPIMAGE_RUNTIME_VERSION: continuous
-  APPIMAGE_RUNTIME_X86_64_HASH: 'e3c4dfb70eddf42e7e5a1d28dff396d30563aa9a901970aebe6f01f3fecf9f8e'
+  APPIMAGE_FORK: 'PopTracker'
+  APPIMAGETOOL_VERSION: 'r-2025-10-19'
+  APPIMAGETOOL_X86_64_HASH: '9493a6b253a01f84acb9c624c38810ecfa11d99daa829b952b0bff43113080f9'
+  APPIMAGE_RUNTIME_VERSION: 'r-2025-08-11'
+  APPIMAGE_RUNTIME_X86_64_HASH: 'e70ffa9b69b211574d0917adc482dd66f25a0083427b5945783965d55b0b0a8b'

 permissions: # permissions required for attestation
   id-token: 'write'

@@ -127,9 +128,9 @@ jobs:
       - name: Install build-time dependencies
         run: |
           echo "PYTHON=python3.12" >> $GITHUB_ENV
-          wget -nv https://github.com/AppImage/appimagetool/releases/download/$APPIMAGETOOL_VERSION/appimagetool-x86_64.AppImage
+          wget -nv https://github.com/$APPIMAGE_FORK/appimagetool/releases/download/$APPIMAGETOOL_VERSION/appimagetool-x86_64.AppImage
           echo "$APPIMAGETOOL_X86_64_HASH appimagetool-x86_64.AppImage" | sha256sum -c
-          wget -nv https://github.com/AppImage/type2-runtime/releases/download/$APPIMAGE_RUNTIME_VERSION/runtime-x86_64
+          wget -nv https://github.com/$APPIMAGE_FORK/type2-runtime/releases/download/$APPIMAGE_RUNTIME_VERSION/runtime-x86_64
          echo "$APPIMAGE_RUNTIME_X86_64_HASH runtime-x86_64" | sha256sum -c
          chmod a+rx appimagetool-x86_64.AppImage
          ./appimagetool-x86_64.AppImage --appimage-extract
.github/workflows/unittests.yml (vendored, 12 changes)

@@ -39,15 +39,15 @@ jobs:
       matrix:
         os: [ubuntu-latest]
         python:
-          - {version: '3.10'}
-          - {version: '3.11'}
+          - {version: '3.11.2'} # Change to '3.11' around 2026-06-10
           - {version: '3.12'}
+          - {version: '3.13'}
         include:
-          - python: {version: '3.10'} # old compat
+          - python: {version: '3.11'} # old compat
            os: windows-latest
-          - python: {version: '3.12'} # current
+          - python: {version: '3.13'} # current
            os: windows-latest
-          - python: {version: '3.12'} # current
+          - python: {version: '3.13'} # current
            os: macos-latest

     steps:

@@ -75,7 +75,7 @@ jobs:
       os:
         - ubuntu-latest
       python:
-        - {version: '3.12'} # current
+        - {version: '3.13'} # current

     steps:
       - uses: actions/checkout@v4
.run/Build APWorld.run.xml (new file, 24 lines)

@@ -0,0 +1,24 @@
+<component name="ProjectRunConfigurationManager">
+  <configuration default="false" name="Build APWorld" type="PythonConfigurationType" factoryName="Python">
+    <module name="Archipelago" />
+    <option name="ENV_FILES" value="" />
+    <option name="INTERPRETER_OPTIONS" value="" />
+    <option name="PARENT_ENVS" value="true" />
+    <envs>
+      <env name="PYTHONUNBUFFERED" value="1" />
+    </envs>
+    <option name="SDK_HOME" value="" />
+    <option name="WORKING_DIRECTORY" value="$PROJECT_DIR$/" />
+    <option name="IS_MODULE_SDK" value="true" />
+    <option name="ADD_CONTENT_ROOTS" value="true" />
+    <option name="ADD_SOURCE_ROOTS" value="true" />
+    <option name="SCRIPT_NAME" value="$ContentRoot$/Launcher.py" />
+    <option name="PARAMETERS" value="\"Build APWorlds\"" />
+    <option name="SHOW_COMMAND_LINE" value="false" />
+    <option name="EMULATE_TERMINAL" value="false" />
+    <option name="MODULE_MODE" value="false" />
+    <option name="REDIRECT_INPUT" value="false" />
+    <option name="INPUT_FILE" value="" />
+    <method v="2" />
+  </configuration>
+</component>
@@ -261,6 +261,7 @@ class MultiWorld():
                 "local_items": set(item_link.get("local_items", [])),
                 "non_local_items": set(item_link.get("non_local_items", [])),
                 "link_replacement": replacement_prio.index(item_link["link_replacement"]),
+                "skip_if_solo": item_link.get("skip_if_solo", False),
             }

         for _name, item_link in item_links.items():

@@ -284,6 +285,8 @@ class MultiWorld():

         for group_name, item_link in item_links.items():
             game = item_link["game"]
+            if item_link["skip_if_solo"] and len(item_link["players"]) == 1:
+                continue
             group_id, group = self.add_group(group_name, game, set(item_link["players"]))

             group["item_pool"] = item_link["item_pool"]
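Aside (not part of the diff): the new skip_if_solo key resolved in the first hunk is what the second hunk checks before creating an item-link group. A minimal sketch of that gate, using a hypothetical resolved link dict with made-up values:

    # Hypothetical resolved item link, mirroring the keys assembled in the first hunk above.
    item_link = {
        "players": {1},                # only one slot ended up in this link
        "game": "A Link to the Past",  # example game name
        "skip_if_solo": True,          # new key; defaults to False when absent
    }

    if item_link["skip_if_solo"] and len(item_link["players"]) == 1:
        print("solo item link: no group is created")
    else:
        print("item link group would be created")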
@@ -1343,8 +1346,7 @@ class Region:
         for entrance in self.entrances:  # BFS might be better here, trying DFS for now.
             return entrance.parent_region.get_connecting_entrance(is_main_entrance)

-    def add_locations(self, locations: Dict[str, Optional[int]],
-                      location_type: Optional[type[Location]] = None) -> None:
+    def add_locations(self, locations: Mapping[str, int | None], location_type: type[Location] | None = None) -> None:
         """
         Adds locations to the Region object, where location_type is your Location class and locations is a dict of
         location names to address.

@@ -1432,8 +1434,8 @@ class Region:
         entrance.connect(self)
         return entrance

-    def add_exits(self, exits: Union[Iterable[str], Dict[str, Optional[str]]],
-                  rules: Dict[str, Callable[[CollectionState], bool]] = None) -> List[Entrance]:
+    def add_exits(self, exits: Iterable[str] | Mapping[str, str | None],
+                  rules: Mapping[str, Callable[[CollectionState], bool]] | None = None) -> List[Entrance]:
         """
         Connects current region to regions in exit dictionary. Passed region names must exist first.

@@ -1441,7 +1443,7 @@ class Region:
         created entrances will be named "self.name -> connecting_region"
         :param rules: rules for the exits from this region. format is {"connecting_region": rule}
         """
-        if not isinstance(exits, Dict):
+        if not isinstance(exits, Mapping):
             exits = dict.fromkeys(exits)
         return [
             self.connect(
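The signature changes above widen add_locations/add_exits from Dict to any Mapping, and the isinstance check follows suit. A small runnable illustration of why that matters (the mapping contents here are made up):

    from collections.abc import Mapping
    from types import MappingProxyType

    # A read-only mapping of location name -> address (None would mark an event location).
    locations: Mapping[str, int | None] = MappingProxyType({"Chest 1": 1001, "Boss Event": None})

    # Under the old Dict check this object would have fallen into the dict.fromkeys(exits) branch
    # and lost its values; with Mapping it is used as-is.
    print(isinstance(locations, dict))     # False
    print(isinstance(locations, Mapping))  # True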
@@ -1855,6 +1857,9 @@ class Spoiler:
             Utils.__version__, self.multiworld.seed))
         outfile.write('Filling Algorithm: %s\n' % self.multiworld.algorithm)
         outfile.write('Players: %d\n' % self.multiworld.players)
+        if self.multiworld.players > 1:
+            loc_count = len([loc for loc in self.multiworld.get_locations() if not loc.is_event])
+            outfile.write('Total Location Count: %d\n' % loc_count)
         outfile.write(f'Plando Options: {self.multiworld.plando_options}\n')
         AutoWorld.call_stage(self.multiworld, "write_spoiler_header", outfile)

@@ -1863,6 +1868,9 @@ class Spoiler:
             outfile.write('\nPlayer %d: %s\n' % (player, self.multiworld.get_player_name(player)))
             outfile.write('Game: %s\n' % self.multiworld.game[player])

+            loc_count = len([loc for loc in self.multiworld.get_locations(player) if not loc.is_event])
+            outfile.write('Location Count: %d\n' % loc_count)
+
             for f_option, option in self.multiworld.worlds[player].options_dataclass.type_hints.items():
                 write_option(f_option, option)
@@ -99,17 +99,6 @@ class ClientCommandProcessor(CommandProcessor):
         self.ctx.on_print_json({"data": parts, "cmd": "PrintJSON"})
         return True

-    def get_current_datapackage(self) -> dict[str, typing.Any]:
-        """
-        Return datapackage for current game if known.
-
-        :return: The datapackage for the currently registered game. If not found, an empty dictionary will be returned.
-        """
-        if not self.ctx.game:
-            return {}
-        checksum = self.ctx.checksums[self.ctx.game]
-        return Utils.load_data_package_for_checksum(self.ctx.game, checksum)
-
     def _cmd_missing(self, filter_text = "") -> bool:
         """List all missing location checks, from your local game state.
         Can be given text, which will be used as filter."""

@@ -119,8 +108,8 @@ class ClientCommandProcessor(CommandProcessor):
         count = 0
         checked_count = 0

-        lookup = self.get_current_datapackage().get("location_name_to_id", {})
-        for location, location_id in lookup.items():
+        lookup = self.ctx.location_names[self.ctx.game]
+        for location_id, location in lookup.items():
             if filter_text and filter_text not in location:
                 continue
             if location_id < 0:

@@ -141,11 +130,10 @@ class ClientCommandProcessor(CommandProcessor):
         self.output("No missing location checks found.")
         return True

-    def output_datapackage_part(self, key: str, name: str) -> bool:
+    def output_datapackage_part(self, name: typing.Literal["Item Names", "Location Names"]) -> bool:
         """
         Helper to digest a specific section of this game's datapackage.

-        :param key: The dictionary key in the datapackage.
         :param name: Printed to the user as context for the part.

         :return: Whether the process was successful.

@@ -154,23 +142,20 @@ class ClientCommandProcessor(CommandProcessor):
             self.output(f"No game set, cannot determine {name}.")
             return False

-        lookup = self.get_current_datapackage().get(key)
-        if lookup is None:
-            self.output("datapackage not yet loaded, try again")
-            return False
+        lookup = self.ctx.item_names if name == "Item Names" else self.ctx.location_names
+        lookup = lookup[self.ctx.game]

         self.output(f"{name} for {self.ctx.game}")
-        for key in lookup:
-            self.output(key)
+        for name in lookup.values():
+            self.output(name)
         return True

     def _cmd_items(self) -> bool:
         """List all item names for the currently running game."""
-        return self.output_datapackage_part("item_name_to_id", "Item Names")
+        return self.output_datapackage_part("Item Names")

     def _cmd_locations(self) -> bool:
         """List all location names for the currently running game."""
-        return self.output_datapackage_part("location_name_to_id", "Location Names")
+        return self.output_datapackage_part("Location Names")

     def output_group_part(self, group_key: typing.Literal["item_name_groups", "location_name_groups"],
                           filter_key: str,

@@ -871,9 +856,9 @@ async def server_loop(ctx: CommonContext, address: typing.Optional[str] = None)

     server_url = urllib.parse.urlparse(address)
     if server_url.username:
-        ctx.username = server_url.username
+        ctx.username = urllib.parse.unquote(server_url.username)
     if server_url.password:
-        ctx.password = server_url.password
+        ctx.password = urllib.parse.unquote(server_url.password)

     def reconnect_hint() -> str:
         return ", type /connect to reconnect" if ctx.server_address else ""
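The client commands above now read the id -> name lookups that the client context already maintains instead of reloading the datapackage from disk, which is why the /missing loop flipped to (location_id, location). A runnable sketch with a stand-in dict (ids and names are made up):

    # Stand-in for self.ctx.location_names[self.ctx.game]: an id -> name mapping per game.
    location_names = {67000: "Link's House", 67001: "Secret Passage", 67002: "Lost Woods Chest"}

    filter_text = "Chest"
    for location_id, location in location_names.items():
        if filter_text and filter_text not in location:
            continue
        print(location_id, location)  # only entries whose name matches the filter survive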
@@ -28,7 +28,7 @@ COPY requirements.txt WebHostLib/requirements.txt

 RUN pip install --no-cache-dir -r \
     WebHostLib/requirements.txt \
-    "setuptools<81"
+    "setuptools>=75,<81"

 COPY _speedups.pyx .
 COPY intset.h .

@@ -36,7 +36,7 @@ COPY intset.h .
 RUN cythonize -b -i _speedups.pyx

 # Archipelago
-FROM python:3.12-slim AS archipelago
+FROM python:3.12-slim-bookworm AS archipelago
 ARG TARGETARCH
 ENV VIRTUAL_ENV=/opt/venv
 ENV PYTHONUNBUFFERED=1
Fill.py (8 changes)

@@ -129,6 +129,10 @@ def fill_restrictive(multiworld: MultiWorld, base_state: CollectionState, locati
                                      for i, location in enumerate(placements))
                 for (i, location, unsafe) in swap_attempts:
                     placed_item = location.item
+                    if item_to_place == placed_item:
+                        # The number of allowed swaps is limited, so do not allow a swap of an item with a copy of
+                        # itself.
+                        continue
                     # Unplaceable items can sometimes be swapped infinitely. Limit the
                     # number of times we will swap an individual item to prevent this
                     swap_count = swapped_items[placed_item.player, placed_item.name, unsafe]

@@ -549,10 +553,12 @@ def distribute_items_restrictive(multiworld: MultiWorld,
     if prioritylocations and regular_progression:
         # retry with one_item_per_player off because some priority fills can fail to fill with that optimization
         # deprioritized items are still not in the mix, so they need to be collected into state first.
+        # allow_partial should only be set if there is deprioritized progression to fall back on.
         priority_retry_state = sweep_from_pool(multiworld.state, deprioritized_progression)
         fill_restrictive(multiworld, priority_retry_state, prioritylocations, regular_progression,
                          single_player_placement=single_player, swap=False, on_place=mark_for_locking,
-                         name="Priority Retry", one_item_per_player=False, allow_partial=True)
+                         name="Priority Retry", one_item_per_player=False,
+                         allow_partial=bool(deprioritized_progression))

     if prioritylocations and deprioritized_progression:
         # There are no more regular progression items that can be placed on any priority locations.
Generate.py (58 changes)

@@ -23,7 +23,7 @@ from BaseClasses import seeddigits, get_seed, PlandoOptions
 from Utils import parse_yamls, version_tuple, __version__, tuplize_version


-def mystery_argparse():
+def mystery_argparse(argv: list[str] | None = None):
     from settings import get_settings
     settings = get_settings()
     defaults = settings.generator

@@ -57,7 +57,7 @@ def mystery_argparse():
     parser.add_argument("--spoiler_only", action="store_true",
                         help="Skips generation assertion and multidata, outputting only a spoiler log. "
                              "Intended for debugging and testing purposes.")
-    args = parser.parse_args()
+    args = parser.parse_args(argv)

     if args.skip_output and args.spoiler_only:
         parser.error("Cannot mix --skip_output and --spoiler_only")

@@ -166,19 +166,10 @@ def main(args=None) -> tuple[argparse.Namespace, int]:
                         f"A mix is also permitted.")

     from worlds.AutoWorld import AutoWorldRegister
-    from worlds.alttp.EntranceRandomizer import parse_arguments
-    erargs = parse_arguments(['--multi', str(args.multi)])
-    erargs.seed = seed
-    erargs.plando_options = args.plando
-    erargs.spoiler = args.spoiler
-    erargs.race = args.race
-    erargs.outputname = seed_name
-    erargs.outputpath = args.outputpath
-    erargs.skip_prog_balancing = args.skip_prog_balancing
-    erargs.skip_output = args.skip_output
-    erargs.spoiler_only = args.spoiler_only
-    erargs.name = {}
-    erargs.csv_output = args.csv_output
+    args.outputname = seed_name
+    args.sprite = dict.fromkeys(range(1, args.multi+1), None)
+    args.sprite_pool = dict.fromkeys(range(1, args.multi+1), None)
+    args.name = {}

     settings_cache: dict[str, tuple[argparse.Namespace, ...]] = \
         {fname: (tuple(roll_settings(yaml, args.plando) for yaml in yamls) if args.sameoptions else None)

@@ -205,7 +196,7 @@ def main(args=None) -> tuple[argparse.Namespace, int]:
     for player in range(1, args.multi + 1):
         player_path_cache[player] = player_files.get(player, args.weights_file_path)
     name_counter = Counter()
-    erargs.player_options = {}
+    args.player_options = {}

     player = 1
     while player <= args.multi:

@@ -218,21 +209,21 @@ def main(args=None) -> tuple[argparse.Namespace, int]:
                for k, v in vars(settingsObject).items():
                    if v is not None:
                        try:
-                            getattr(erargs, k)[player] = v
+                            getattr(args, k)[player] = v
                        except AttributeError:
-                            setattr(erargs, k, {player: v})
+                            setattr(args, k, {player: v})
                        except Exception as e:
                            raise Exception(f"Error setting {k} to {v} for player {player}") from e

                # name was not specified
-                if player not in erargs.name:
+                if player not in args.name:
                    if path == args.weights_file_path:
                        # weights file, so we need to make the name unique
-                        erargs.name[player] = f"Player{player}"
+                        args.name[player] = f"Player{player}"
                    else:
                        # use the filename
-                        erargs.name[player] = os.path.splitext(os.path.split(path)[-1])[0]
-                erargs.name[player] = handle_name(erargs.name[player], player, name_counter)
+                        args.name[player] = os.path.splitext(os.path.split(path)[-1])[0]
+                args.name[player] = handle_name(args.name[player], player, name_counter)

                player += 1
            except Exception as e:

@@ -240,10 +231,10 @@ def main(args=None) -> tuple[argparse.Namespace, int]:
        else:
            raise RuntimeError(f'No weights specified for player {player}')

-    if len(set(name.lower() for name in erargs.name.values())) != len(erargs.name):
-        raise Exception(f"Names have to be unique. Names: {Counter(name.lower() for name in erargs.name.values())}")
+    if len(set(name.lower() for name in args.name.values())) != len(args.name):
+        raise Exception(f"Names have to be unique. Names: {Counter(name.lower() for name in args.name.values())}")

-    return erargs, seed
+    return args, seed


 def read_weights_yamls(path) -> tuple[Any, ...]:

@@ -495,7 +486,22 @@ def roll_settings(weights: dict, plando_options: PlandoOptions = PlandoOptions.b
     if required_plando_options:
         raise Exception(f"Settings reports required plando module {str(required_plando_options)}, "
                         f"which is not enabled.")
+    games = requirements.get("game", {})
+    for game, version in games.items():
+        if game not in AutoWorldRegister.world_types:
+            continue
+        if not version:
+            raise Exception(f"Invalid version for game {game}: {version}.")
+        if isinstance(version, str):
+            version = {"min": version}
+        if "min" in version and tuplize_version(version["min"]) > AutoWorldRegister.world_types[game].world_version:
+            raise Exception(f"Settings reports required version of world \"{game}\" is at least {version['min']}, "
+                            f"however world is of version "
+                            f"{AutoWorldRegister.world_types[game].world_version.as_simple_string()}.")
+        if "max" in version and tuplize_version(version["max"]) < AutoWorldRegister.world_types[game].world_version:
+            raise Exception(f"Settings reports required version of world \"{game}\" is no later than {version['max']}, "
+                            f"however world is of version "
+                            f"{AutoWorldRegister.world_types[game].world_version.as_simple_string()}.")
     ret = argparse.Namespace()
     for option_key in Options.PerGameCommonOptions.type_hints:
         if option_key in weights and option_key not in Options.CommonOptions.type_hints:
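The new requirements check above accepts either a bare version string (treated as a minimum) or a mapping with min/max keys, compared against the world's version via tuplize_version. A self-contained sketch of that shape handling; the tuplize_version stand-in and the example world version are assumptions, not the real Utils implementation:

    def tuplize_version(version: str) -> tuple[int, ...]:
        # stand-in for Utils.tuplize_version
        return tuple(int(piece) for piece in version.split("."))

    world_version = (0, 6, 2)  # made-up world_version for illustration

    for required in ("0.6.0", {"min": "0.6.0", "max": "0.7.0"}, {"min": "0.7.0"}):
        version = {"min": required} if isinstance(required, str) else required
        ok = True
        if "min" in version and tuplize_version(version["min"]) > world_version:
            ok = False
        if "max" in version and tuplize_version(version["max"]) < world_version:
            ok = False
        print(required, "->", "accepted" if ok else "rejected")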
@@ -1,9 +0,0 @@
-if __name__ == '__main__':
-    import ModuleUpdate
-    ModuleUpdate.update()
-
-    import Utils
-    Utils.init_logging("KH1Client", exception_logger="Client")
-
-    from worlds.kh1.Client import launch
-    launch()
@@ -1,8 +0,0 @@
-import ModuleUpdate
-import Utils
-from worlds.kh2.Client import launch
-ModuleUpdate.update()
-
-if __name__ == '__main__':
-    Utils.init_logging("KH2Client", exception_logger="Client")
-    launch()
@@ -484,7 +484,7 @@ def main(args: argparse.Namespace | dict | None = None):

 if __name__ == '__main__':
     init_logging('Launcher')
-    Utils.freeze_support()
+    multiprocessing.freeze_support()
     multiprocessing.set_start_method("spawn")  # if launched process uses kivy, fork won't work
     parser = argparse.ArgumentParser(
         description='Archipelago Launcher',
Main.py (13 changes)

@@ -37,7 +37,7 @@ def main(args, seed=None, baked_server_options: dict[str, object] | None = None)

     logger = logging.getLogger()
     multiworld.set_seed(seed, args.race, str(args.outputname) if args.outputname else None)
-    multiworld.plando_options = args.plando_options
+    multiworld.plando_options = args.plando
     multiworld.game = args.game.copy()
     multiworld.player_name = args.name.copy()
     multiworld.sprite = args.sprite.copy()

@@ -54,12 +54,17 @@ def main(args, seed=None, baked_server_options: dict[str, object] | None = None)
     logger.info(f"Found {len(AutoWorld.AutoWorldRegister.world_types)} World Types:")
     longest_name = max(len(text) for text in AutoWorld.AutoWorldRegister.world_types)

-    item_count = len(str(max(len(cls.item_names) for cls in AutoWorld.AutoWorldRegister.world_types.values())))
-    location_count = len(str(max(len(cls.location_names) for cls in AutoWorld.AutoWorldRegister.world_types.values())))
+    world_classes = AutoWorld.AutoWorldRegister.world_types.values()
+    version_count = max(len(cls.world_version.as_simple_string()) for cls in world_classes)
+    item_count = len(str(max(len(cls.item_names) for cls in world_classes)))
+    location_count = len(str(max(len(cls.location_names) for cls in world_classes)))

     for name, cls in AutoWorld.AutoWorldRegister.world_types.items():
         if not cls.hidden and len(cls.item_names) > 0:
-            logger.info(f"  {name:{longest_name}}: Items: {len(cls.item_names):{item_count}} | "
+            logger.info(f"  {name:{longest_name}}: "
+                        f"v{cls.world_version.as_simple_string():{version_count}} | "
+                        f"Items: {len(cls.item_names):{item_count}} | "
                         f"Locations: {len(cls.location_names):{location_count}}")

     del item_count, location_count
@@ -5,15 +5,15 @@ import multiprocessing
 import warnings


-if sys.platform in ("win32", "darwin") and sys.version_info < (3, 10, 11):
+if sys.platform in ("win32", "darwin") and sys.version_info < (3, 11, 9):
     # Official micro version updates. This should match the number in docs/running from source.md.
-    raise RuntimeError(f"Incompatible Python Version found: {sys.version_info}. Official 3.10.15+ is supported.")
-elif sys.platform in ("win32", "darwin") and sys.version_info < (3, 10, 15):
+    raise RuntimeError(f"Incompatible Python Version found: {sys.version_info}. Official 3.11.9+ is supported.")
+elif sys.platform in ("win32", "darwin") and sys.version_info < (3, 11, 13):
     # There are known security issues, but no easy way to install fixed versions on Windows for testing.
     warnings.warn(f"Python Version {sys.version_info} has security issues. Don't use in production.")
-elif sys.version_info < (3, 10, 1):
+elif sys.version_info < (3, 11, 0):
     # Other platforms may get security backports instead of micro updates, so the number is unreliable.
-    raise RuntimeError(f"Incompatible Python Version found: {sys.version_info}. 3.10.1+ is supported.")
+    raise RuntimeError(f"Incompatible Python Version found: {sys.version_info}. 3.11.0+ is supported.")

 # don't run update if environment is frozen/compiled or if not the parent process (skip in subprocess)
 _skip_update = bool(

@@ -74,11 +74,11 @@ def update_command():
 def install_pkg_resources(yes=False):
     try:
         import pkg_resources  # noqa: F401
-    except ImportError:
+    except (AttributeError, ImportError):
         check_pip()
         if not yes:
             confirm("pkg_resources not found, press enter to install it")
-        subprocess.call([sys.executable, "-m", "pip", "install", "--upgrade", "setuptools<81"])
+        subprocess.call([sys.executable, "-m", "pip", "install", "--upgrade", "setuptools>=75,<81"])


 def update(yes: bool = False, force: bool = False) -> None:
MultiServer.py (221 changes)

@@ -32,7 +32,7 @@ if typing.TYPE_CHECKING:

 import colorama
 import websockets
-from websockets.extensions.permessage_deflate import PerMessageDeflate
+from websockets.extensions.permessage_deflate import PerMessageDeflate, ServerPerMessageDeflateFactory
 try:
     # ponyorm is a requirement for webhost, not default server, so may not be importable
     from pony.orm.dbapiprovider import OperationalError

@@ -50,6 +50,15 @@ from BaseClasses import ItemClassification
 min_client_version = Version(0, 5, 0)
 colorama.just_fix_windows_console()

+no_version = Version(0, 0, 0)
+assert isinstance(no_version, tuple)  # assert immutable
+
+server_per_message_deflate_factory = ServerPerMessageDeflateFactory(
+    server_max_window_bits=11,
+    client_max_window_bits=11,
+    compress_settings={"memLevel": 4},
+)
+

 def remove_from_list(container, value):
     try:

@@ -125,8 +134,31 @@ def get_saving_second(seed_name: str, interval: int = 60) -> int:


 class Client(Endpoint):
-    version = Version(0, 0, 0)
-    tags: typing.List[str]
+    __slots__ = (
+        "__weakref__",
+        "version",
+        "auth",
+        "team",
+        "slot",
+        "send_index",
+        "tags",
+        "messageprocessor",
+        "ctx",
+        "remote_items",
+        "remote_start_inventory",
+        "no_items",
+        "no_locations",
+        "no_text",
+    )
+
+    version: Version
+    auth: bool
+    team: int | None
+    slot: int | None
+    send_index: int
+    tags: list[str]
+    messageprocessor: ClientMessageProcessor
+    ctx: weakref.ref[Context]
     remote_items: bool
     remote_start_inventory: bool
     no_items: bool
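A note on the "__weakref__" entry in the new slot list (illustration, not from the diff): once a class defines __slots__, its instances lose the per-instance __dict__, and weak references only keep working if "__weakref__" is declared as a slot, so declaring it preserves the ability to take weak references to Client instances:

    import weakref

    class Slotted:
        __slots__ = ("__weakref__", "value")

        def __init__(self, value: int) -> None:
            self.value = value

    obj = Slotted(1)
    ref = weakref.ref(obj)  # works only because "__weakref__" is listed in __slots__
    print(ref().value)      # 1
    # obj.other = 2         # would raise AttributeError: no per-instance __dict__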
@@ -135,6 +167,7 @@ class Client(Endpoint):

     def __init__(self, socket: "ServerConnection", ctx: Context) -> None:
         super().__init__(socket)
+        self.version = no_version
         self.auth = False
         self.team = None
         self.slot = None

@@ -142,6 +175,11 @@ class Client(Endpoint):
         self.tags = []
         self.messageprocessor = client_message_processor(ctx, self)
         self.ctx = weakref.ref(ctx)
+        self.remote_items = False
+        self.remote_start_inventory = False
+        self.no_items = False
+        self.no_locations = False
+        self.no_text = False

     @property
     def items_handling(self):

@@ -179,6 +217,7 @@ class Context:
                     "release_mode": str,
                     "remaining_mode": str,
                     "collect_mode": str,
+                    "countdown_mode": str,
                     "item_cheat": bool,
                     "compatibility": int}
     # team -> slot id -> list of clients authenticated to slot.

@@ -208,8 +247,8 @@ class Context:

     def __init__(self, host: str, port: int, server_password: str, password: str, location_check_points: int,
                  hint_cost: int, item_cheat: bool, release_mode: str = "disabled", collect_mode="disabled",
-                 remaining_mode: str = "disabled", auto_shutdown: typing.SupportsFloat = 0, compatibility: int = 2,
-                 log_network: bool = False, logger: logging.Logger = logging.getLogger()):
+                 countdown_mode: str = "auto", remaining_mode: str = "disabled", auto_shutdown: typing.SupportsFloat = 0,
+                 compatibility: int = 2, log_network: bool = False, logger: logging.Logger = logging.getLogger()):
         self.logger = logger
         super(Context, self).__init__()
         self.slot_info = {}

@@ -242,6 +281,7 @@ class Context:
         self.release_mode: str = release_mode
         self.remaining_mode: str = remaining_mode
         self.collect_mode: str = collect_mode
+        self.countdown_mode: str = countdown_mode
         self.item_cheat = item_cheat
         self.exit_event = asyncio.Event()
         self.client_activity_timers: typing.Dict[

@@ -627,6 +667,7 @@ class Context:
                 "server_password": self.server_password, "password": self.password,
                 "release_mode": self.release_mode,
                 "remaining_mode": self.remaining_mode, "collect_mode": self.collect_mode,
+                "countdown_mode": self.countdown_mode,
                 "item_cheat": self.item_cheat, "compatibility": self.compatibility}

         }

@@ -661,6 +702,7 @@ class Context:
         self.release_mode = savedata["game_options"]["release_mode"]
         self.remaining_mode = savedata["game_options"]["remaining_mode"]
         self.collect_mode = savedata["game_options"]["collect_mode"]
+        self.countdown_mode = savedata["game_options"].get("countdown_mode", self.countdown_mode)
         self.item_cheat = savedata["game_options"]["item_cheat"]
         self.compatibility = savedata["game_options"]["compatibility"]

@@ -1135,8 +1177,13 @@ def register_location_checks(ctx: Context, team: int, slot: int, locations: typi
             ctx.save()


-def collect_hints(ctx: Context, team: int, slot: int, item: typing.Union[int, str], auto_status: HintStatus) \
-        -> typing.List[Hint]:
+def collect_hints(ctx: Context, team: int, slot: int, item: typing.Union[int, str],
+                  status: HintStatus | None = None) -> typing.List[Hint]:
+    """
+    Collect a new hint for a given item id or name, with a given status.
+    If status is None (which is the default value), an automatic status will be determined from the item's quality.
+    """
+
     hints = []
     slots: typing.Set[int] = {slot}
     for group_id, group in ctx.groups.items():

@@ -1152,25 +1199,39 @@ def collect_hints(ctx: Context, team: int, slot: int, item: typing.Union[int, st
         else:
             found = location_id in ctx.location_checks[team, finding_player]
         entrance = ctx.er_hint_data.get(finding_player, {}).get(location_id, "")
-        new_status = auto_status
+        hint_status = status  # Assign again because we're in a for loop
         if found:
-            new_status = HintStatus.HINT_FOUND
-        elif item_flags & ItemClassification.trap:
-            new_status = HintStatus.HINT_AVOID
-        hints.append(Hint(receiving_player, finding_player, location_id, item_id, found, entrance,
-                          item_flags, new_status))
+            hint_status = HintStatus.HINT_FOUND
+        elif hint_status is None:
+            if item_flags & ItemClassification.trap:
+                hint_status = HintStatus.HINT_AVOID
+            else:
+                hint_status = HintStatus.HINT_PRIORITY
+
+        hints.append(
+            Hint(receiving_player, finding_player, location_id, item_id, found, entrance, item_flags, hint_status)
+        )

     return hints


-def collect_hint_location_name(ctx: Context, team: int, slot: int, location: str, auto_status: HintStatus) \
-        -> typing.List[Hint]:
+def collect_hint_location_name(ctx: Context, team: int, slot: int, location: str,
+                               status: HintStatus | None = HintStatus.HINT_UNSPECIFIED) -> typing.List[Hint]:
+    """
+    Collect a new hint for a given location name, with a given status (defaults to "unspecified").
+    If None is passed for the status, then an automatic status will be determined from the item's quality.
+    """
     seeked_location: int = ctx.location_names_for_game(ctx.games[slot])[location]
-    return collect_hint_location_id(ctx, team, slot, seeked_location, auto_status)
+    return collect_hint_location_id(ctx, team, slot, seeked_location, status)


-def collect_hint_location_id(ctx: Context, team: int, slot: int, seeked_location: int, auto_status: HintStatus) \
-        -> typing.List[Hint]:
+def collect_hint_location_id(ctx: Context, team: int, slot: int, seeked_location: int,
+                             status: HintStatus | None = HintStatus.HINT_UNSPECIFIED) -> typing.List[Hint]:
+    """
+    Collect a new hint for a given location id, with a given status (defaults to "unspecified").
+    If None is passed for the status, then an automatic status will be determined from the item's quality.
+    """
     prev_hint = ctx.get_hint(team, slot, seeked_location)
     if prev_hint:
         return [prev_hint]

@@ -1180,13 +1241,16 @@ def collect_hint_location_id(ctx: Context, team: int, slot: int, seeked_location

         found = seeked_location in ctx.location_checks[team, slot]
         entrance = ctx.er_hint_data.get(slot, {}).get(seeked_location, "")
-        new_status = auto_status
         if found:
-            new_status = HintStatus.HINT_FOUND
-        elif item_flags & ItemClassification.trap:
-            new_status = HintStatus.HINT_AVOID
-        return [Hint(receiving_player, slot, seeked_location, item_id, found, entrance, item_flags,
-                     new_status)]
+            status = HintStatus.HINT_FOUND
+        elif status is None:
+            if item_flags & ItemClassification.trap:
+                status = HintStatus.HINT_AVOID
+            else:
+                status = HintStatus.HINT_PRIORITY
+
+        return [Hint(receiving_player, slot, seeked_location, item_id, found, entrance, item_flags, status)]
     return []

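Status resolution in the rewritten hint collectors above follows one order: a found location always wins, an explicit status is kept, and None falls back to trap/priority classification. A compact, runnable restatement with stand-in enum values (the real constants live in NetUtils and may differ):

    from enum import IntEnum

    class HintStatus(IntEnum):  # stand-in values, not the real NetUtils definitions
        HINT_FOUND = 0
        HINT_UNSPECIFIED = 1
        HINT_AVOID = 2
        HINT_PRIORITY = 3

    def resolve_status(found: bool, is_trap: bool, status: HintStatus | None) -> HintStatus:
        if found:
            return HintStatus.HINT_FOUND
        if status is None:
            return HintStatus.HINT_AVOID if is_trap else HintStatus.HINT_PRIORITY
        return status

    print(resolve_status(found=False, is_trap=True, status=None).name)                         # HINT_AVOID
    print(resolve_status(found=False, is_trap=True, status=HintStatus.HINT_UNSPECIFIED).name)  # kept as passed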
@@ -1300,7 +1364,8 @@ class CommandProcessor(metaclass=CommandMeta):
                     argname += "=" + parameter.default
                 argtext += argname
                 argtext += " "
-            s += f"{self.marker}{command} {argtext}\n    {method.__doc__}\n"
+            doctext = '\n    '.join(inspect.getdoc(method).split('\n'))
+            s += f"{self.marker}{command} {argtext}\n    {doctext}\n"
         return s

     def _cmd_help(self):
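Why the help formatter switched from method.__doc__ to inspect.getdoc() (illustration): getdoc() strips the common leading indentation that multi-line docstrings carry in source, so re-joining with '\n    ' yields evenly indented help text:

    import inspect

    def _cmd_example() -> bool:
        """First line.
        Second line, indented in source."""
        return True

    print(repr(_cmd_example.__doc__))          # second line keeps its source indentation
    print(repr(inspect.getdoc(_cmd_example)))  # dedented by getdoc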
@@ -1329,19 +1394,6 @@ class CommandProcessor(metaclass=CommandMeta):
|
|||||||
class CommonCommandProcessor(CommandProcessor):
|
class CommonCommandProcessor(CommandProcessor):
|
||||||
ctx: Context
|
ctx: Context
|
||||||
|
|
||||||
def _cmd_countdown(self, seconds: str = "10") -> bool:
|
|
||||||
"""Start a countdown in seconds"""
|
|
||||||
try:
|
|
||||||
timer = int(seconds, 10)
|
|
||||||
except ValueError:
|
|
||||||
timer = 10
|
|
||||||
else:
|
|
||||||
if timer > 60 * 60:
|
|
||||||
raise ValueError(f"{timer} is invalid. Maximum is 1 hour.")
|
|
||||||
|
|
||||||
async_start(countdown(self.ctx, timer))
|
|
||||||
return True
|
|
||||||
|
|
||||||
def _cmd_options(self):
|
def _cmd_options(self):
|
||||||
"""List all current options. Warning: lists password."""
|
"""List all current options. Warning: lists password."""
|
||||||
self.output("Current options:")
|
self.output("Current options:")
|
||||||
@@ -1483,6 +1535,23 @@ class ClientMessageProcessor(CommonCommandProcessor):
                        " You can ask the server admin for a /collect")
            return False

+    def _cmd_countdown(self, seconds: str = "10") -> bool:
+        """Start a countdown in seconds"""
+        if self.ctx.countdown_mode == "disabled" or \
+                self.ctx.countdown_mode == "auto" and len(self.ctx.player_names) >= 30:
+            self.output("Sorry, client countdowns have been disabled on this server. You can ask the server admin for a /countdown")
+            return False
+        try:
+            timer = int(seconds, 10)
+        except ValueError:
+            timer = 10
+        else:
+            if timer > 60 * 60:
+                raise ValueError(f"{timer} is invalid. Maximum is 1 hour.")
+
+        async_start(countdown(self.ctx, timer))
+        return True
+
    def _cmd_remaining(self) -> bool:
        """List remaining items in your game, but not their location or recipient"""
        if self.ctx.remaining_mode == "enabled":
@@ -1610,7 +1679,6 @@ class ClientMessageProcessor(CommonCommandProcessor):
    def get_hints(self, input_text: str, for_location: bool = False) -> bool:
        points_available = get_client_points(self.ctx, self.client)
        cost = self.ctx.get_hint_cost(self.client.slot)
-        auto_status = HintStatus.HINT_UNSPECIFIED if for_location else HintStatus.HINT_PRIORITY
        if not input_text:
            hints = {hint.re_check(self.ctx, self.client.team) for hint in
                     self.ctx.hints[self.client.team, self.client.slot]}
@@ -1636,9 +1704,9 @@ class ClientMessageProcessor(CommonCommandProcessor):
                    self.output(f"Sorry, \"{hint_name}\" is marked as non-hintable.")
                    hints = []
                elif not for_location:
-                    hints = collect_hints(self.ctx, self.client.team, self.client.slot, hint_id, auto_status)
+                    hints = collect_hints(self.ctx, self.client.team, self.client.slot, hint_id)
                else:
-                    hints = collect_hint_location_id(self.ctx, self.client.team, self.client.slot, hint_id, auto_status)
+                    hints = collect_hint_location_id(self.ctx, self.client.team, self.client.slot, hint_id)

            else:
                game = self.ctx.games[self.client.slot]
@@ -1658,16 +1726,18 @@
                    hints = []
                    for item_name in self.ctx.item_name_groups[game][hint_name]:
                        if item_name in self.ctx.item_names_for_game(game): # ensure item has an ID
-                            hints.extend(collect_hints(self.ctx, self.client.team, self.client.slot, item_name, auto_status))
+                            hints.extend(collect_hints(self.ctx, self.client.team, self.client.slot, item_name))
                elif not for_location and hint_name in self.ctx.item_names_for_game(game): # item name
-                    hints = collect_hints(self.ctx, self.client.team, self.client.slot, hint_name, auto_status)
+                    hints = collect_hints(self.ctx, self.client.team, self.client.slot, hint_name)
                elif hint_name in self.ctx.location_name_groups[game]: # location group name
                    hints = []
                    for loc_name in self.ctx.location_name_groups[game][hint_name]:
                        if loc_name in self.ctx.location_names_for_game(game):
-                            hints.extend(collect_hint_location_name(self.ctx, self.client.team, self.client.slot, loc_name, auto_status))
+                            hints.extend(
+                                collect_hint_location_name(self.ctx, self.client.team, self.client.slot, loc_name)
+                            )
                else: # location name
-                    hints = collect_hint_location_name(self.ctx, self.client.team, self.client.slot, hint_name, auto_status)
+                    hints = collect_hint_location_name(self.ctx, self.client.team, self.client.slot, hint_name)

            else:
                self.output(response)
@@ -1945,8 +2015,7 @@ async def process_client_cmd(ctx: Context, client: Client, args: dict):

                target_item, target_player, flags = ctx.locations[client.slot][location]
                if create_as_hint:
-                    hints.extend(collect_hint_location_id(ctx, client.team, client.slot, location,
-                                                          HintStatus.HINT_UNSPECIFIED))
+                    hints.extend(collect_hint_location_id(ctx, client.team, client.slot, location))
                locs.append(NetworkItem(target_item, location, target_player, flags))
            ctx.notify_hints(client.team, hints, only_new=create_as_hint == 2, persist_even_if_found=True)
            if locs and create_as_hint:
@@ -1961,6 +2030,16 @@ async def process_client_cmd(ctx: Context, client: Client, args: dict):
            if not locations:
                await ctx.send_msgs(client, [{"cmd": "InvalidPacket", "type": "arguments",
                                              "text": "CreateHints: No locations specified.", "original_cmd": cmd}])
+                return

+            try:
+                status = HintStatus(status)
+            except ValueError as err:
+                await ctx.send_msgs(client,
+                                    [{"cmd": "InvalidPacket", "type": "arguments",
+                                      "text": f"Unknown Status: {err}",
+                                      "original_cmd": cmd}])
+                return
+
            hints = []

@@ -2228,6 +2307,19 @@ class ServerCommandProcessor(CommonCommandProcessor):
            self.output(f"Could not find player {player_name} to collect")
            return False

+    def _cmd_countdown(self, seconds: str = "10") -> bool:
+        """Start a countdown in seconds"""
+        try:
+            timer = int(seconds, 10)
+        except ValueError:
+            timer = 10
+        else:
+            if timer > 60 * 60:
+                raise ValueError(f"{timer} is invalid. Maximum is 1 hour.")
+
+        async_start(countdown(self.ctx, timer))
+        return True
+
    @mark_raw
    def _cmd_release(self, player_name: str) -> bool:
        """Send out the remaining items from a player to their intended recipients."""
@@ -2349,9 +2441,9 @@ class ServerCommandProcessor(CommonCommandProcessor):
                hints = []
                for item_name_from_group in self.ctx.item_name_groups[game][item]:
                    if item_name_from_group in self.ctx.item_names_for_game(game): # ensure item has an ID
-                        hints.extend(collect_hints(self.ctx, team, slot, item_name_from_group, HintStatus.HINT_PRIORITY))
+                        hints.extend(collect_hints(self.ctx, team, slot, item_name_from_group))
            else: # item name or id
-                hints = collect_hints(self.ctx, team, slot, item, HintStatus.HINT_PRIORITY)
+                hints = collect_hints(self.ctx, team, slot, item)

            if hints:
                self.ctx.notify_hints(team, hints)
@@ -2385,17 +2477,14 @@ class ServerCommandProcessor(CommonCommandProcessor):

        if usable:
            if isinstance(location, int):
-                hints = collect_hint_location_id(self.ctx, team, slot, location,
-                                                 HintStatus.HINT_UNSPECIFIED)
+                hints = collect_hint_location_id(self.ctx, team, slot, location)
            elif game in self.ctx.location_name_groups and location in self.ctx.location_name_groups[game]:
                hints = []
                for loc_name_from_group in self.ctx.location_name_groups[game][location]:
                    if loc_name_from_group in self.ctx.location_names_for_game(game):
-                        hints.extend(collect_hint_location_name(self.ctx, team, slot, loc_name_from_group,
-                                                                HintStatus.HINT_UNSPECIFIED))
+                        hints.extend(collect_hint_location_name(self.ctx, team, slot, loc_name_from_group))
            else:
-                hints = collect_hint_location_name(self.ctx, team, slot, location,
-                                                   HintStatus.HINT_UNSPECIFIED)
+                hints = collect_hint_location_name(self.ctx, team, slot, location)
            if hints:
                self.ctx.notify_hints(team, hints)
            else:
@@ -2423,6 +2512,11 @@ class ServerCommandProcessor(CommonCommandProcessor):
        elif value_type == str and option_name.endswith("password"):
            def value_type(input_text: str):
                return None if input_text.lower() in {"null", "none", '""', "''"} else input_text
+        elif option_name == "countdown_mode":
+            valid_values = {"enabled", "disabled", "auto"}
+            if option_value.lower() not in valid_values:
+                self.output(f"Unrecognized {option_name} value '{option_value}', known: {', '.join(valid_values)}")
+                return False
        elif value_type == str and option_name.endswith("mode"):
            valid_values = {"goal", "enabled", "disabled"}
            valid_values.update(("auto", "auto_enabled") if option_name != "remaining_mode" else [])
@@ -2510,6 +2604,13 @@ def parse_args() -> argparse.Namespace:
                             goal: !collect can be used after goal completion
                             auto-enabled: !collect is available and automatically triggered on goal completion
                             ''')
+    parser.add_argument('--countdown_mode', default=defaults["countdown_mode"], nargs='?',
+                        choices=['enabled', 'disabled', "auto"], help='''\
+                             Select !countdown Accessibility. (default: %(default)s)
+                             enabled: !countdown is always available
+                             disabled: !countdown is never available
+                             auto: !countdown is available for rooms with less than 30 players
+                             ''')
    parser.add_argument('--remaining_mode', default=defaults["remaining_mode"], nargs='?',
                        choices=['enabled', 'disabled', "goal"], help='''\
                             Select !remaining Accessibility. (default: %(default)s)
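(Illustration, not part of the commit: a standalone sketch of how the three `--countdown_mode` choices interact with the client command added earlier in this diff; the 30-player threshold comes from the `_cmd_countdown` hunk above.)

    def countdown_allowed(countdown_mode: str, player_count: int) -> bool:
        # Mirrors ClientMessageProcessor._cmd_countdown: "disabled" always blocks,
        # "auto" blocks once the room reaches 30 players, "enabled" never blocks.
        if countdown_mode == "disabled":
            return False
        if countdown_mode == "auto" and player_count >= 30:
            return False
        return True

    assert countdown_allowed("enabled", 100)
    assert not countdown_allowed("auto", 30)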
@@ -2575,7 +2676,7 @@ async def main(args: argparse.Namespace):

    ctx = Context(args.host, args.port, args.server_password, args.password, args.location_check_points,
                  args.hint_cost, not args.disable_item_cheat, args.release_mode, args.collect_mode,
-                  args.remaining_mode,
+                  args.countdown_mode, args.remaining_mode,
                  args.auto_shutdown, args.compatibility, args.log_network)
    data_filename = args.multidata

@@ -2610,7 +2711,13 @@ async def main(args: argparse.Namespace):

    ssl_context = load_server_cert(args.cert, args.cert_key) if args.cert else None

-    ctx.server = websockets.serve(functools.partial(server, ctx=ctx), host=ctx.host, port=ctx.port, ssl=ssl_context)
+    ctx.server = websockets.serve(
+        functools.partial(server, ctx=ctx),
+        host=ctx.host,
+        port=ctx.port,
+        ssl=ssl_context,
+        extensions=[server_per_message_deflate_factory],
+    )
    ip = args.host if args.host else Utils.get_public_ipv4()
    logging.info('Hosting game at %s:%d (%s)' % (ip, ctx.port,
                 'No password' if not ctx.password else 'Password: %s' % ctx.password))

@@ -174,6 +174,8 @@ decode = JSONDecoder(object_hook=_object_hook).decode


class Endpoint:
+    __slots__ = ("socket",)
+
    socket: "ServerConnection"

    def __init__(self, socket):

Options.py (14 changed lines)
@@ -1380,7 +1380,7 @@ class NonLocalItems(ItemSet):


class StartInventory(ItemDict):
-    """Start with these items."""
+    """Start with the specified amount of these items. Example: "Bomb: 1" """
    verify_item_name = True
    display_name = "Start Inventory"
    rich_text_doc = True
@@ -1388,7 +1388,7 @@ class StartInventory(ItemDict):


class StartInventoryPool(StartInventory):
-    """Start with these items and don't place them in the world.
+    """Start with the specified amount of these items and don't place them in the world. Example: "Bomb: 1"

    The game decides what the replacement items will be.
    """
@@ -1446,6 +1446,7 @@ class ItemLinks(OptionList):
        Optional("local_items"): [And(str, len)],
        Optional("non_local_items"): [And(str, len)],
        Optional("link_replacement"): Or(None, bool),
+        Optional("skip_if_solo"): Or(None, bool),
    }
    ])

@@ -1473,8 +1474,10 @@ class ItemLinks(OptionList):
        super(ItemLinks, self).verify(world, player_name, plando_options)
        existing_links = set()
        for link in self.value:
+            link["name"] = link["name"].strip()[:16].strip()
            if link["name"] in existing_links:
-                raise Exception(f"You cannot have more than one link named {link['name']}.")
+                raise Exception(f"Item link names are limited to their first 16 characters and must be unique. "
+                                f"You have more than one link named '{link['name']}'.")
            existing_links.add(link["name"])

            pool = self.verify_items(link["item_pool"], link["name"], "item_pool", world)
@@ -1752,7 +1755,10 @@ def generate_yaml_templates(target_folder: typing.Union[str, "pathlib.Path"], ge

        res = template.render(
            option_groups=option_groups,
-            __version__=__version__, game=game_name, yaml_dump=yaml_dump_scalar,
+            __version__=__version__,
+            game=game_name,
+            world_version=world.world_version.as_simple_string(),
+            yaml_dump=yaml_dump_scalar,
            dictify_range=dictify_range,
            cleandoc=cleandoc,
        )

@@ -20,7 +20,6 @@ Currently, the following games are supported:
* Meritous
* Super Metroid/Link to the Past combo randomizer (SMZ3)
* ChecksFinder
-* ArchipIDLE
* Hollow Knight
* The Witness
* Sonic Adventure 2: Battle
@@ -81,6 +80,8 @@ Currently, the following games are supported:
* Super Mario Land 2: 6 Golden Coins
* shapez
* Paint
+* Celeste (Open World)
+* Choo-Choo Charles

For setup and instructions check out our [tutorials page](https://archipelago.gg/tutorial/).
Downloads can be found at [Releases](https://github.com/ArchipelagoMW/Archipelago/releases), including compiled

@@ -18,7 +18,7 @@ from json import loads, dumps
from CommonClient import CommonContext, server_loop, ClientCommandProcessor, gui_enabled, get_base_parser

import Utils
-from settings import Settings
+import settings
from Utils import async_start
from MultiServer import mark_raw
if typing.TYPE_CHECKING:
@@ -286,7 +286,7 @@ class SNESState(enum.IntEnum):


def launch_sni() -> None:
-    sni_path = Settings.sni_options.sni_path
+    sni_path = settings.get_settings().sni_options.sni_path

    if not os.path.isdir(sni_path):
        sni_path = Utils.local_path(sni_path)
@@ -669,7 +669,7 @@ async def game_watcher(ctx: SNIContext) -> None:


async def run_game(romfile: str) -> None:
-    auto_start = Settings.sni_options.snes_rom_start
+    auto_start = settings.get_settings().sni_options.snes_rom_start
    if auto_start is True:
        import webbrowser
        webbrowser.open(romfile)

@@ -1,11 +0,0 @@
-from __future__ import annotations
-
-import ModuleUpdate
-ModuleUpdate.update()
-
-from worlds.sc2.Client import launch
-import Utils
-
-if __name__ == "__main__":
-    Utils.init_logging("Starcraft2Client", exception_logger="Client")
-    launch()

Utils.py (90 changed lines)
@@ -1,6 +1,7 @@
from __future__ import annotations

import asyncio
+import concurrent.futures
import json
import typing
import builtins
@@ -35,7 +36,7 @@ if typing.TYPE_CHECKING:


def tuplize_version(version: str) -> Version:
-    return Version(*(int(piece, 10) for piece in version.split(".")))
+    return Version(*(int(piece) for piece in version.split(".")))


class Version(typing.NamedTuple):
@@ -47,7 +48,7 @@ class Version(typing.NamedTuple):
        return ".".join(str(item) for item in self)


-__version__ = "0.6.3"
+__version__ = "0.6.4"
version_tuple = tuplize_version(__version__)

is_linux = sys.platform.startswith("linux")
@@ -322,11 +323,13 @@ def get_options() -> Settings:
    return get_settings()


-def persistent_store(category: str, key: str, value: typing.Any):
-    path = user_path("_persistent_storage.yaml")
+def persistent_store(category: str, key: str, value: typing.Any, force_store: bool = False):
    storage = persistent_load()
+    if not force_store and category in storage and key in storage[category] and storage[category][key] == value:
+        return  # no changes necessary
    category_dict = storage.setdefault(category, {})
    category_dict[key] = value
+    path = user_path("_persistent_storage.yaml")
    with open(path, "wt") as f:
        f.write(dump(storage, Dumper=Dumper))

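(Illustration, not part of the commit: how the reworked helper behaves with and without the new `force_store` flag; the category and key names are made up.)

    import Utils

    # Skipped silently when the stored value is already "example" (the new early return).
    Utils.persistent_store("example_category", "example_key", "example")

    # force_store=True always rewrites _persistent_storage.yaml, even when nothing changed.
    Utils.persistent_store("example_category", "example_key", "example", force_store=True)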
@@ -414,11 +417,11 @@ def get_adjuster_settings(game_name: str) -> Namespace:
@cache_argsless
def get_unique_identifier():
    common_path = cache_path("common.json")
-    if os.path.exists(common_path):
+    try:
        with open(common_path) as f:
            common_file = json.load(f)
        uuid = common_file.get("uuid", None)
-    else:
+    except FileNotFoundError:
        common_file = {}
        uuid = None

@@ -428,6 +431,9 @@ def get_unique_identifier():
        from uuid import uuid4
        uuid = str(uuid4())
        common_file["uuid"] = uuid
+
+        cache_folder = os.path.dirname(common_path)
+        os.makedirs(cache_folder, exist_ok=True)
    with open(common_path, "w") as f:
        json.dump(common_file, f, separators=(",", ":"))
    return uuid
@@ -472,7 +478,7 @@ class RestrictedUnpickler(pickle.Unpickler):
            mod = importlib.import_module(module)
            obj = getattr(mod, name)
            if issubclass(obj, (self.options_module.Option, self.options_module.PlandoConnection,
-                                self.options_module.PlandoText)):
+                                self.options_module.PlandoItem, self.options_module.PlandoText)):
                return obj
        # Forbid everything else.
        raise pickle.UnpicklingError(f"global '{module}.{name}' is forbidden")
@@ -715,13 +721,22 @@ def get_intended_text(input_text: str, possible_answers) -> typing.Tuple[str, bo


def get_input_text_from_response(text: str, command: str) -> typing.Optional[str]:
+    """
+    Parses the response text from `get_intended_text` to find the suggested input and autocomplete the command in
+    arguments with it.
+
+    :param text: The response text from `get_intended_text`.
+    :param command: The command to which the input text should be added. Must contain the prefix used by the command
+        (`!` or `/`).
+    :return: The command with the suggested input text appended, or None if no suggestion was found.
+    """
    if "did you mean " in text:
        for question in ("Didn't find something that closely matches",
                         "Too many close matches"):
            if text.startswith(question):
                name = get_text_between(text, "did you mean '",
                                        "'? (")
-                return f"!{command} {name}"
+                return f"{command} {name}"
    elif text.startswith("Missing: "):
        return text.replace("Missing: ", "!hint_location ")
    return None
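(Illustration, not part of the commit: what the helper documented above returns; the response string is fabricated to roughly match the format the docstring describes.)

    import Utils

    response = "Didn't find something that closely matches 'Moon Perl', did you mean 'Moon Pearl'? (80% sure)"
    print(Utils.get_input_text_from_response(response, "!hint"))            # -> "!hint Moon Pearl"
    print(Utils.get_input_text_from_response("Sorry, no match.", "!hint"))  # -> None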
@@ -900,7 +915,7 @@ def async_start(co: Coroutine[None, None, typing.Any], name: Optional[str] = Non
    Use this to start a task when you don't keep a reference to it or immediately await it,
    to prevent early garbage collection. "fire-and-forget"
    """
-    # https://docs.python.org/3.10/library/asyncio-task.html#asyncio.create_task
+    # https://docs.python.org/3.11/library/asyncio-task.html#asyncio.create_task
    # Python docs:
    # ```
    # Important: Save a reference to the result of [asyncio.create_task],
@@ -937,15 +952,15 @@ class DeprecateDict(dict):


def _extend_freeze_support() -> None:
-    """Extend multiprocessing.freeze_support() to also work on Non-Windows for spawn."""
-    # upstream issue: https://github.com/python/cpython/issues/76327
+    """Extend multiprocessing.freeze_support() to also work on Non-Windows and without setting spawn method first."""
+    # original upstream issue: https://github.com/python/cpython/issues/76327
    # code based on https://github.com/pyinstaller/pyinstaller/blob/develop/PyInstaller/hooks/rthooks/pyi_rth_multiprocessing.py#L26
    import multiprocessing
    import multiprocessing.spawn

    def _freeze_support() -> None:
        """Minimal freeze_support. Only apply this if frozen."""
-        from subprocess import _args_from_interpreter_flags
+        from subprocess import _args_from_interpreter_flags  # noqa

        # Prevent `spawn` from trying to read `__main__` in from the main script
        multiprocessing.process.ORIGINAL_DIR = None
@@ -972,17 +987,23 @@ def _extend_freeze_support() -> None:
            multiprocessing.spawn.spawn_main(**kwargs)
            sys.exit()

-    if not is_windows and is_frozen():
-        multiprocessing.freeze_support = multiprocessing.spawn.freeze_support = _freeze_support
+    def _noop() -> None:
+        pass

+    multiprocessing.freeze_support = multiprocessing.spawn.freeze_support = _freeze_support if is_frozen() else _noop


def freeze_support() -> None:
-    """This behaves like multiprocessing.freeze_support but also works on Non-Windows."""
+    """This now only calls multiprocessing.freeze_support since we are patching freeze_support on module load."""
    import multiprocessing
-    _extend_freeze_support()
+    deprecate("Use multiprocessing.freeze_support() instead")
    multiprocessing.freeze_support()


+_extend_freeze_support()


def visualize_regions(root_region: Region, file_name: str, *,
                      show_entrance_names: bool = False, show_locations: bool = True, show_other_regions: bool = True,
                      linetype_ortho: bool = True, regions_to_highlight: set[Region] | None = None) -> None:
@@ -1118,3 +1139,40 @@ def is_iterable_except_str(obj: object) -> TypeGuard[typing.Iterable[typing.Any]
    if isinstance(obj, str):
        return False
    return isinstance(obj, typing.Iterable)
+
+
+class DaemonThreadPoolExecutor(concurrent.futures.ThreadPoolExecutor):
+    """
+    ThreadPoolExecutor that uses daemonic threads that do not keep the program alive.
+    NOTE: use this with caution because killed threads will not properly clean up.
+    """
+
+    def _adjust_thread_count(self):
+        # see upstream ThreadPoolExecutor for details
+        import threading
+        import weakref
+        from concurrent.futures.thread import _worker
+
+        if self._idle_semaphore.acquire(timeout=0):
+            return
+
+        def weakref_cb(_, q=self._work_queue):
+            q.put(None)
+
+        num_threads = len(self._threads)
+        if num_threads < self._max_workers:
+            thread_name = f"{self._thread_name_prefix or self}_{num_threads}"
+            t = threading.Thread(
+                name=thread_name,
+                target=_worker,
+                args=(
+                    weakref.ref(self, weakref_cb),
+                    self._work_queue,
+                    self._initializer,
+                    self._initargs,
+                ),
+                daemon=True,
+            )
+            t.start()
+            self._threads.add(t)
+            # NOTE: don't add to _threads_queues so we don't block on shutdown
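(Illustration, not part of the commit: a minimal usage sketch for the new executor; the worker function is an arbitrary placeholder.)

    from Utils import DaemonThreadPoolExecutor

    def background_work(label: str) -> str:
        # Daemonic pool threads will not keep the interpreter alive on exit.
        return f"finished {label}"

    executor = DaemonThreadPoolExecutor(max_workers=2, thread_name_prefix="background")
    future = executor.submit(background_work, "example task")
    print(future.result())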

@@ -99,16 +99,23 @@ if __name__ == "__main__":
    multiprocessing.set_start_method('spawn')
    logging.basicConfig(format='[%(asctime)s] %(message)s', level=logging.INFO)

-    from WebHostLib.lttpsprites import update_sprites_lttp
    from WebHostLib.autolauncher import autohost, autogen, stop
    from WebHostLib.options import create as create_options_files

    try:
+        from WebHostLib.lttpsprites import update_sprites_lttp
        update_sprites_lttp()
    except Exception as e:
        logging.exception(e)
        logging.warning("Could not update LttP sprites.")
    app = get_app()
+    from worlds import AutoWorldRegister
+    # Update to only valid WebHost worlds
+    invalid_worlds = {name for name, world in AutoWorldRegister.world_types.items()
+                      if not hasattr(world.web, "tutorials")}
+    if invalid_worlds:
+        logging.error(f"Following worlds not loaded as they are invalid for WebHost: {invalid_worlds}")
+        AutoWorldRegister.world_types = {k: v for k, v in AutoWorldRegister.world_types.items() if k not in invalid_worlds}
    create_options_files()
    copy_tutorials_files_to_static()
    if app.config["SELFLAUNCH"]:

@@ -1,6 +1,7 @@
import base64
import os
import socket
+import typing
import uuid

from flask import Flask
@@ -61,20 +62,21 @@ cache = Cache()
Compress(app)


-def to_python(value):
+def to_python(value: str) -> uuid.UUID:
    return uuid.UUID(bytes=base64.urlsafe_b64decode(value + '=='))


-def to_url(value):
+def to_url(value: uuid.UUID) -> str:
    return base64.urlsafe_b64encode(value.bytes).rstrip(b'=').decode('ascii')


class B64UUIDConverter(BaseConverter):

-    def to_python(self, value):
+    def to_python(self, value: str) -> uuid.UUID:
        return to_python(value)

-    def to_url(self, value):
+    def to_url(self, value: typing.Any) -> str:
+        assert isinstance(value, uuid.UUID)
        return to_url(value)

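(Illustration, not part of the commit: the two module-level converters round-trip a UUID through the URL-safe token used in room links; this assumes the WebHostLib package is importable in your environment.)

    import uuid
    from WebHostLib import to_python, to_url

    room_id = uuid.uuid4()
    token = to_url(room_id)            # 22-character URL-safe string, "=" padding stripped
    assert to_python(token) == room_id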
@@ -84,7 +86,7 @@ app.jinja_env.filters["suuid"] = to_url
app.jinja_env.filters["title_sorted"] = title_sorted


-def register():
+def register() -> None:
    """Import submodules, triggering their registering on flask routing.
    Note: initializes worlds subsystem."""
    import importlib

@@ -11,5 +11,5 @@ api_endpoints = Blueprint('api', __name__, url_prefix="/api")
def get_players(seed: Seed) -> List[Tuple[str, str]]:
    return [(slot.player_name, slot.game) for slot in seed.slots.order_by(Slot.player_id)]

-from . import datapackage, generate, room, user # trigger registration
+# trigger endpoint registration
+from . import datapackage, generate, room, tracker, user

WebHostLib/api/tracker.py (new file, 241 lines)
@@ -0,0 +1,241 @@
+from datetime import datetime, timezone
+from typing import Any, TypedDict
+from uuid import UUID
+
+from flask import abort
+
+from NetUtils import ClientStatus, Hint, NetworkItem, SlotType
+from WebHostLib import cache
+from WebHostLib.api import api_endpoints
+from WebHostLib.models import Room
+from WebHostLib.tracker import TrackerData
+
+
+class PlayerAlias(TypedDict):
+    team: int
+    player: int
+    alias: str | None
+
+
+class PlayerItemsReceived(TypedDict):
+    team: int
+    player: int
+    items: list[NetworkItem]
+
+
+class PlayerChecksDone(TypedDict):
+    team: int
+    player: int
+    locations: list[int]
+
+
+class TeamTotalChecks(TypedDict):
+    team: int
+    checks_done: int
+
+
+class PlayerHints(TypedDict):
+    team: int
+    player: int
+    hints: list[Hint]
+
+
+class PlayerTimer(TypedDict):
+    team: int
+    player: int
+    time: datetime | None
+
+
+class PlayerStatus(TypedDict):
+    team: int
+    player: int
+    status: ClientStatus
+
+
+class PlayerLocationsTotal(TypedDict):
+    team: int
+    player: int
+    total_locations: int
+
+
+@api_endpoints.route("/tracker/<suuid:tracker>")
+@cache.memoize(timeout=60)
+def tracker_data(tracker: UUID) -> dict[str, Any]:
+    """
+    Outputs json data to <root_path>/api/tracker/<id of current session tracker>.
+
+    :param tracker: UUID of current session tracker.
+
+    :return: Tracking data for all players in the room. Typing and docstrings describe the format of each value.
+    """
+    room: Room | None = Room.get(tracker=tracker)
+    if not room:
+        abort(404)
+
+    tracker_data = TrackerData(room)
+
+    all_players: dict[int, list[int]] = tracker_data.get_all_players()
+
+    player_aliases: list[PlayerAlias] = []
+    """Slot aliases of all players."""
+    for team, players in all_players.items():
+        for player in players:
+            player_aliases.append({"team": team, "player": player, "alias": tracker_data.get_player_alias(team, player)})
+
+    player_items_received: list[PlayerItemsReceived] = []
+    """Items received by each player."""
+    for team, players in all_players.items():
+        for player in players:
+            player_items_received.append(
+                {"team": team, "player": player, "items": tracker_data.get_player_received_items(team, player)})
+
+    player_checks_done: list[PlayerChecksDone] = []
+    """ID of all locations checked by each player."""
+    for team, players in all_players.items():
+        for player in players:
+            player_checks_done.append(
+                {"team": team, "player": player, "locations": sorted(tracker_data.get_player_checked_locations(team, player))})
+
+    total_checks_done: list[TeamTotalChecks] = [
+        {"team": team, "checks_done": checks_done}
+        for team, checks_done in tracker_data.get_team_locations_checked_count().items()
+    ]
+    """Total number of locations checked for the entire multiworld per team."""
+
+    hints: list[PlayerHints] = []
+    """Hints that all players have used or received."""
+    for team, players in tracker_data.get_all_slots().items():
+        for player in players:
+            player_hints = sorted(tracker_data.get_player_hints(team, player))
+            hints.append({"team": team, "player": player, "hints": player_hints})
+            slot_info = tracker_data.get_slot_info(player)
+            # this assumes groups are always after players
+            if slot_info.type != SlotType.group:
+                continue
+            for member in slot_info.group_members:
+                hints[member - 1]["hints"] += player_hints
+
+    activity_timers: list[PlayerTimer] = []
+    """Time of last activity per player. Returned as RFC 1123 format and null if no connection has been made."""
+    for team, players in all_players.items():
+        for player in players:
+            activity_timers.append({"team": team, "player": player, "time": None})
+
+    for (team, player), timestamp in tracker_data._multisave.get("client_activity_timers", []):
+        for entry in activity_timers:
+            if entry["team"] == team and entry["player"] == player:
+                entry["time"] = datetime.fromtimestamp(timestamp, timezone.utc)
+                break
+
+    connection_timers: list[PlayerTimer] = []
+    """Time of last connection per player. Returned as RFC 1123 format and null if no connection has been made."""
+    for team, players in all_players.items():
+        for player in players:
+            connection_timers.append({"team": team, "player": player, "time": None})
+
+    for (team, player), timestamp in tracker_data._multisave.get("client_connection_timers", []):
+        # find the matching entry
+        for entry in connection_timers:
+            if entry["team"] == team and entry["player"] == player:
+                entry["time"] = datetime.fromtimestamp(timestamp, timezone.utc)
+                break
+
+    player_status: list[PlayerStatus] = []
+    """The current client status for each player."""
+    for team, players in all_players.items():
+        for player in players:
+            player_status.append({"team": team, "player": player, "status": tracker_data.get_player_client_status(team, player)})
+
+    return {
+        "aliases": player_aliases,
+        "player_items_received": player_items_received,
+        "player_checks_done": player_checks_done,
+        "total_checks_done": total_checks_done,
+        "hints": hints,
+        "activity_timers": activity_timers,
+        "connection_timers": connection_timers,
+        "player_status": player_status,
+    }
+
+
+class PlayerGroups(TypedDict):
+    slot: int
+    name: str
+    members: list[int]
+
+
+class PlayerSlotData(TypedDict):
+    player: int
+    slot_data: dict[str, Any]
+
+
+@api_endpoints.route("/static_tracker/<suuid:tracker>")
+@cache.memoize(timeout=300)
+def static_tracker_data(tracker: UUID) -> dict[str, Any]:
+    """
+    Outputs json data to <root_path>/api/static_tracker/<id of current session tracker>.
+
+    :param tracker: UUID of current session tracker.
+
+    :return: Static tracking data for all players in the room. Typing and docstrings describe the format of each value.
+    """
+    room: Room | None = Room.get(tracker=tracker)
+    if not room:
+        abort(404)
+    tracker_data = TrackerData(room)
+
+    all_players: dict[int, list[int]] = tracker_data.get_all_players()
+
+    groups: list[PlayerGroups] = []
+    """The Slot ID of groups and the IDs of the group's members."""
+    for team, players in tracker_data.get_all_slots().items():
+        for player in players:
+            slot_info = tracker_data.get_slot_info(player)
+            if slot_info.type != SlotType.group or not slot_info.group_members:
+                continue
+            groups.append(
+                {
+                    "slot": player,
+                    "name": slot_info.name,
+                    "members": list(slot_info.group_members),
+                })
+        break
+
+    player_locations_total: list[PlayerLocationsTotal] = []
+    for team, players in all_players.items():
+        for player in players:
+            player_locations_total.append(
+                {"team": team, "player": player, "total_locations": len(tracker_data.get_player_locations(player))})
+
+    return {
+        "groups": groups,
+        "datapackage": tracker_data._multidata["datapackage"],
+        "player_locations_total": player_locations_total,
+    }
+
+
+# It should be exceedingly rare that slot data is needed, so it's separated out.
+@api_endpoints.route("/slot_data_tracker/<suuid:tracker>")
+@cache.memoize(timeout=300)
+def tracker_slot_data(tracker: UUID) -> list[PlayerSlotData]:
+    """
+    Outputs json data to <root_path>/api/slot_data_tracker/<id of current session tracker>.
+
+    :param tracker: UUID of current session tracker.
+
+    :return: Slot data for all players in the room. Typing completely arbitrary per game.
+    """
+    room: Room | None = Room.get(tracker=tracker)
+    if not room:
+        abort(404)
+    tracker_data = TrackerData(room)
+
+    all_players: dict[int, list[int]] = tracker_data.get_all_players()
+
+    slot_data: list[PlayerSlotData] = []
+    """Slot data for each player."""
+    for team, players in all_players.items():
+        for player in players:
+            slot_data.append({"player": player, "slot_data": tracker_data.get_slot_data(player)})
+        break
+
+    return slot_data
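(Illustration, not part of the commit: reading the new endpoint from outside. The host and tracker id are placeholders, and `requests` is only used here for the example; it is not implied to be a project dependency.)

    import requests

    tracker_id = "PLACEHOLDER_SUUID"  # the base64 tracker id from a room's tracker URL
    data = requests.get(f"https://archipelago.gg/api/tracker/{tracker_id}").json()

    for entry in data["player_checks_done"]:
        print(f"team {entry['team']}, player {entry['player']}: {len(entry['locations'])} checks")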
@@ -17,7 +17,7 @@ from .locker import Locker, AlreadyRunningException
_stop_event = Event()


-def stop():
+def stop() -> None:
    """Stops previously launched threads"""
    global _stop_event
    stop_event = _stop_event
@@ -36,25 +36,39 @@ def handle_generation_failure(result: BaseException):
    logging.exception(e)


-def _mp_gen_game(gen_options: dict, meta: dict[str, Any] | None = None, owner=None, sid=None) -> PrimaryKey | None:
+def _mp_gen_game(
+    gen_options: dict,
+    meta: dict[str, Any] | None = None,
+    owner=None,
+    sid=None,
+    timeout: int|None = None,
+) -> PrimaryKey | None:
    from setproctitle import setproctitle

    setproctitle(f"Generator ({sid})")
-    res = gen_game(gen_options, meta=meta, owner=owner, sid=sid)
-    setproctitle(f"Generator (idle)")
-    return res
+    try:
+        return gen_game(gen_options, meta=meta, owner=owner, sid=sid, timeout=timeout)
+    finally:
+        setproctitle(f"Generator (idle)")


-def launch_generator(pool: multiprocessing.pool.Pool, generation: Generation):
+def launch_generator(pool: multiprocessing.pool.Pool, generation: Generation, timeout: int|None) -> None:
    try:
        meta = json.loads(generation.meta)
        options = restricted_loads(generation.options)
        logging.info(f"Generating {generation.id} for {len(options)} players")
-        pool.apply_async(_mp_gen_game, (options,),
-                         {"meta": meta,
-                          "sid": generation.id,
-                          "owner": generation.owner},
-                         handle_generation_success, handle_generation_failure)
+        pool.apply_async(
+            _mp_gen_game,
+            (options,),
+            {
+                "meta": meta,
+                "sid": generation.id,
+                "owner": generation.owner,
+                "timeout": timeout,
+            },
+            handle_generation_success,
+            handle_generation_failure,
+        )
    except Exception as e:
        generation.state = STATE_ERROR
        commit()
@@ -135,6 +149,7 @@ def autogen(config: dict):

    with multiprocessing.Pool(config["GENERATORS"], initializer=init_generator,
                              initargs=(config,), maxtasksperchild=10) as generator_pool:
+        job_time = config["JOB_TIME"]
        with db_session:
            to_start = select(generation for generation in Generation if generation.state == STATE_STARTED)

@@ -145,7 +160,7 @@ def autogen(config: dict):
                if sid:
                    generation.delete()
                else:
-                    launch_generator(generator_pool, generation)
+                    launch_generator(generator_pool, generation, timeout=job_time)

            commit()
            select(generation for generation in Generation if generation.state == STATE_ERROR).delete()
@@ -157,7 +172,7 @@ def autogen(config: dict):
                    generation for generation in Generation
                    if generation.state == STATE_QUEUED).for_update()
                for generation in to_start:
-                    launch_generator(generator_pool, generation)
+                    launch_generator(generator_pool, generation, timeout=job_time)
        except AlreadyRunningException:
            logging.info("Autogen reports as already running, not starting another.")

@@ -19,7 +19,10 @@ from pony.orm import commit, db_session, select

import Utils

-from MultiServer import Context, server, auto_shutdown, ServerCommandProcessor, ClientMessageProcessor, load_server_cert
+from MultiServer import (
+    Context, server, auto_shutdown, ServerCommandProcessor, ClientMessageProcessor, load_server_cert,
+    server_per_message_deflate_factory,
+)
from Utils import restricted_loads, cache_argsless
from .locker import Locker
from .models import Command, GameDataPackage, Room, db
@@ -97,6 +100,7 @@ class WebHostContext(Context):
                    self.main_loop.call_soon_threadsafe(cmdprocessor, command.commandtext)
                    command.delete()
                    commit()
+                del commands
            time.sleep(5)

    @db_session
@@ -146,13 +150,13 @@ class WebHostContext(Context):
        self.location_name_groups = static_location_name_groups
        return self._load(multidata, game_data_packages, True)

-    @db_session
    def init_save(self, enabled: bool = True):
        self.saving = enabled
        if self.saving:
-            savegame_data = Room.get(id=self.room_id).multisave
-            if savegame_data:
-                self.set_save(restricted_loads(Room.get(id=self.room_id).multisave))
+            with db_session:
+                savegame_data = Room.get(id=self.room_id).multisave
+                if savegame_data:
+                    self.set_save(restricted_loads(Room.get(id=self.room_id).multisave))
            self._start_async_saving(atexit_save=False)
        threading.Thread(target=self.listen_to_db_commands, daemon=True).start()

@@ -282,8 +286,12 @@ def run_server_process(name: str, ponyconfig: dict, static_server_data: dict,
        assert ctx.server is None
        try:
            ctx.server = websockets.serve(
-                functools.partial(server, ctx=ctx), ctx.host, ctx.port, ssl=get_ssl_context())
+                functools.partial(server, ctx=ctx),
+                ctx.host,
+                ctx.port,
+                ssl=get_ssl_context(),
+                extensions=[server_per_message_deflate_factory],
+            )
            await ctx.server
        except OSError:  # likely port in use
            ctx.server = websockets.serve(
@@ -304,6 +312,7 @@ def run_server_process(name: str, ponyconfig: dict, static_server_data: dict,
            with db_session:
                room = Room.get(id=ctx.room_id)
                room.last_port = port
+                del room
        else:
            ctx.logger.exception("Could not determine port. Likely hosting failure.")
            with db_session:
@@ -322,6 +331,7 @@ def run_server_process(name: str, ponyconfig: dict, static_server_data: dict,
            with db_session:
                room = Room.get(id=room_id)
                room.last_port = -1
+                del room
            logger.exception(e)
            raise
        else:
@@ -333,11 +343,12 @@ def run_server_process(name: str, ponyconfig: dict, static_server_data: dict,
            ctx.save_dirty = False  # make sure the saving thread does not write to DB after final wakeup
            ctx.exit_event.set()  # make sure the saving thread stops at some point
            # NOTE: async saving should probably be an async task and could be merged with shutdown_task
-            with (db_session):
+            with db_session:
                # ensure the Room does not spin up again on its own, minute of safety buffer
                room = Room.get(id=room_id)
                room.last_activity = datetime.datetime.utcnow() - \
                    datetime.timedelta(minutes=1, seconds=room.timeout)
+                del room
            logging.info(f"Shutting down room {room_id} on {name}.")
        finally:
            await asyncio.sleep(5)

@@ -12,12 +12,11 @@ from flask import flash, redirect, render_template, request, session, url_for
from pony.orm import commit, db_session

from BaseClasses import get_seed, seeddigits
-from Generate import PlandoOptions, handle_name
+from Generate import PlandoOptions, handle_name, mystery_argparse
from Main import main as ERmain
-from Utils import __version__, restricted_dumps
+from Utils import __version__, restricted_dumps, DaemonThreadPoolExecutor
from WebHostLib import app
from settings import ServerOptions, GeneratorOptions
-from worlds.alttp.EntranceRandomizer import parse_arguments
from .check import get_yaml_data, roll_options
from .models import Generation, STATE_ERROR, STATE_QUEUED, Seed, UUID
from .upload import upload_zip_to_db
@@ -34,6 +33,7 @@ def get_meta(options_source: dict, race: bool = False) -> dict[str, list[str] |
        "release_mode": str(options_source.get("release_mode", ServerOptions.release_mode)),
        "remaining_mode": str(options_source.get("remaining_mode", ServerOptions.remaining_mode)),
        "collect_mode": str(options_source.get("collect_mode", ServerOptions.collect_mode)),
+        "countdown_mode": str(options_source.get("countdown_mode", ServerOptions.countdown_mode)),
        "item_cheat": bool(int(options_source.get("item_cheat", not ServerOptions.disable_item_cheat))),
        "server_password": str(options_source.get("server_password", None)),
    }
@@ -73,6 +73,10 @@ def generate(race=False):
    return render_template("generate.html", race=race, version=__version__)


+def format_exception(e: BaseException) -> str:
+    return f"{e.__class__.__name__}: {e}"
+
+
def start_generation(options: dict[str, dict | str], meta: dict[str, Any]):
    results, gen_options = roll_options(options, set(meta["plando_options"]))

@@ -93,7 +97,9 @@ def start_generation(options: dict[str, dict | str], meta: dict[str, Any]):
        except PicklingError as e:
            from .autolauncher import handle_generation_failure
            handle_generation_failure(e)
-            return render_template("seedError.html", seed_error=("PicklingError: " + str(e)))
+            meta["error"] = format_exception(e)
+            details = json.dumps(meta, indent=4).strip()
+            return render_template("seedError.html", seed_error=meta["error"], details=details)

        commit()

@@ -101,16 +107,18 @@ def start_generation(options: dict[str, dict | str], meta: dict[str, Any]):
|
|||||||
else:
|
else:
|
||||||
try:
|
try:
|
||||||
seed_id = gen_game({name: vars(options) for name, options in gen_options.items()},
|
seed_id = gen_game({name: vars(options) for name, options in gen_options.items()},
|
||||||
meta=meta, owner=session["_id"].int)
|
meta=meta, owner=session["_id"].int, timeout=app.config["JOB_TIME"])
|
||||||
except BaseException as e:
|
except BaseException as e:
|
||||||
from .autolauncher import handle_generation_failure
|
from .autolauncher import handle_generation_failure
|
||||||
handle_generation_failure(e)
|
handle_generation_failure(e)
|
||||||
return render_template("seedError.html", seed_error=(e.__class__.__name__ + ": " + str(e)))
|
meta["error"] = format_exception(e)
|
||||||
|
details = json.dumps(meta, indent=4).strip()
|
||||||
|
return render_template("seedError.html", seed_error=meta["error"], details=details)
|
||||||
|
|
||||||
return redirect(url_for("view_seed", seed=seed_id))
|
return redirect(url_for("view_seed", seed=seed_id))
|
||||||
|
|
||||||
|
|
||||||
def gen_game(gen_options: dict, meta: dict[str, Any] | None = None, owner=None, sid=None):
|
def gen_game(gen_options: dict, meta: dict[str, Any] | None = None, owner=None, sid=None, timeout: int|None = None):
|
||||||
if meta is None:
|
if meta is None:
|
||||||
meta = {}
|
meta = {}
|
||||||
|
|
||||||
@@ -129,43 +137,47 @@ def gen_game(gen_options: dict, meta: dict[str, Any] | None = None, owner=None,
|
|||||||
|
|
||||||
seedname = "W" + (f"{random.randint(0, pow(10, seeddigits) - 1)}".zfill(seeddigits))
|
seedname = "W" + (f"{random.randint(0, pow(10, seeddigits) - 1)}".zfill(seeddigits))
|
||||||
|
|
||||||
erargs = parse_arguments(['--multi', str(playercount)])
|
args = mystery_argparse([]) # Just to set up the Namespace with defaults
|
||||||
erargs.seed = seed
|
args.multi = playercount
|
||||||
erargs.name = {x: "" for x in range(1, playercount + 1)} # only so it can be overwritten in mystery
|
args.seed = seed
|
||||||
erargs.spoiler = meta["generator_options"].get("spoiler", 0)
|
args.name = {x: "" for x in range(1, playercount + 1)} # only so it can be overwritten in mystery
|
||||||
erargs.race = race
|
args.spoiler = meta["generator_options"].get("spoiler", 0)
|
||||||
erargs.outputname = seedname
|
args.race = race
|
||||||
erargs.outputpath = target.name
|
args.outputname = seedname
|
||||||
erargs.teams = 1
|
args.outputpath = target.name
|
||||||
erargs.plando_options = PlandoOptions.from_set(meta.setdefault("plando_options",
|
args.teams = 1
|
||||||
{"bosses", "items", "connections", "texts"}))
|
args.plando_options = PlandoOptions.from_set(meta.setdefault("plando_options",
|
||||||
erargs.skip_prog_balancing = False
|
{"bosses", "items", "connections", "texts"}))
|
||||||
erargs.skip_output = False
|
args.skip_prog_balancing = False
|
||||||
erargs.spoiler_only = False
|
args.skip_output = False
|
||||||
erargs.csv_output = False
|
args.spoiler_only = False
|
||||||
|
args.csv_output = False
|
||||||
|
args.sprite = dict.fromkeys(range(1, args.multi+1), None)
|
||||||
|
args.sprite_pool = dict.fromkeys(range(1, args.multi+1), None)
|
||||||
|
|
||||||
name_counter = Counter()
|
name_counter = Counter()
|
||||||
for player, (playerfile, settings) in enumerate(gen_options.items(), 1):
|
for player, (playerfile, settings) in enumerate(gen_options.items(), 1):
|
||||||
for k, v in settings.items():
|
for k, v in settings.items():
|
||||||
if v is not None:
|
if v is not None:
|
||||||
if hasattr(erargs, k):
|
if hasattr(args, k):
|
||||||
getattr(erargs, k)[player] = v
|
getattr(args, k)[player] = v
|
||||||
else:
|
else:
|
||||||
setattr(erargs, k, {player: v})
|
setattr(args, k, {player: v})
|
||||||
|
|
||||||
if not erargs.name[player]:
|
if not args.name[player]:
|
||||||
erargs.name[player] = os.path.splitext(os.path.split(playerfile)[-1])[0]
|
args.name[player] = os.path.splitext(os.path.split(playerfile)[-1])[0]
|
||||||
erargs.name[player] = handle_name(erargs.name[player], player, name_counter)
|
args.name[player] = handle_name(args.name[player], player, name_counter)
|
||||||
if len(set(erargs.name.values())) != len(erargs.name):
|
if len(set(args.name.values())) != len(args.name):
|
||||||
raise Exception(f"Names have to be unique. Names: {Counter(erargs.name.values())}")
|
raise Exception(f"Names have to be unique. Names: {Counter(args.name.values())}")
|
||||||
ERmain(erargs, seed, baked_server_options=meta["server_options"])
|
ERmain(args, seed, baked_server_options=meta["server_options"])
|
||||||
|
|
||||||
return upload_to_db(target.name, sid, owner, race)
|
return upload_to_db(target.name, sid, owner, race)
|
||||||
thread_pool = concurrent.futures.ThreadPoolExecutor(max_workers=1)
|
|
||||||
|
thread_pool = DaemonThreadPoolExecutor(max_workers=1)
|
||||||
thread = thread_pool.submit(task)
|
thread = thread_pool.submit(task)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
return thread.result(app.config["JOB_TIME"])
|
return thread.result(timeout)
|
||||||
except concurrent.futures.TimeoutError as e:
|
except concurrent.futures.TimeoutError as e:
|
||||||
if sid:
|
if sid:
|
||||||
with db_session:
|
with db_session:
|
||||||
@@ -173,11 +185,14 @@ def gen_game(gen_options: dict, meta: dict[str, Any] | None = None, owner=None,
|
|||||||
if gen is not None:
|
if gen is not None:
|
||||||
gen.state = STATE_ERROR
|
gen.state = STATE_ERROR
|
||||||
meta = json.loads(gen.meta)
|
meta = json.loads(gen.meta)
|
||||||
meta["error"] = (
|
meta["error"] = ("Allowed time for Generation exceeded, " +
|
||||||
"Allowed time for Generation exceeded, please consider generating locally instead. " +
|
"please consider generating locally instead. " +
|
||||||
e.__class__.__name__ + ": " + str(e))
|
format_exception(e))
|
||||||
gen.meta = json.dumps(meta)
|
gen.meta = json.dumps(meta)
|
||||||
commit()
|
commit()
|
||||||
|
except (KeyboardInterrupt, SystemExit):
|
||||||
|
# don't update db, retry next time
|
||||||
|
raise
|
||||||
except BaseException as e:
|
except BaseException as e:
|
||||||
if sid:
|
if sid:
|
||||||
with db_session:
|
with db_session:
|
||||||
@@ -185,10 +200,15 @@ def gen_game(gen_options: dict, meta: dict[str, Any] | None = None, owner=None,
|
|||||||
if gen is not None:
|
if gen is not None:
|
||||||
gen.state = STATE_ERROR
|
gen.state = STATE_ERROR
|
||||||
meta = json.loads(gen.meta)
|
meta = json.loads(gen.meta)
|
||||||
meta["error"] = (e.__class__.__name__ + ": " + str(e))
|
meta["error"] = format_exception(e)
|
||||||
gen.meta = json.dumps(meta)
|
gen.meta = json.dumps(meta)
|
||||||
commit()
|
commit()
|
||||||
raise
|
raise
|
||||||
|
finally:
|
||||||
|
# free resources claimed by thread pool, if possible
|
||||||
|
# NOTE: Timeout depends on the process being killed at some point
|
||||||
|
# since we can't actually cancel a running gen at the moment.
|
||||||
|
thread_pool.shutdown(wait=False, cancel_futures=True)
|
||||||
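The timeout handling added above leans entirely on the standard library: Future.result(timeout) raises concurrent.futures.TimeoutError once the allowed time is exceeded, and shutdown(wait=False, cancel_futures=True) releases the pool without waiting for the generation, which cannot be cancelled. A minimal, self-contained illustration of that pattern (the long-running lambda is a stand-in for the real generation task, not code from this diff):

import concurrent.futures

pool = concurrent.futures.ThreadPoolExecutor(max_workers=1)  # stand-in for DaemonThreadPoolExecutor
future = pool.submit(lambda: sum(x * x for x in range(10**8)))  # stand-in for the generation work
try:
    future.result(timeout=0.1)  # raises concurrent.futures.TimeoutError when the limit is hit
except concurrent.futures.TimeoutError:
    pass  # here generate.py marks the Generation row as STATE_ERROR
finally:
    pool.shutdown(wait=False, cancel_futures=True)  # free the pool; the worker may keep running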
|
|
||||||
|
|
||||||
@app.route('/wait/<suuid:seed>')
|
@app.route('/wait/<suuid:seed>')
|
||||||
@@ -202,7 +222,9 @@ def wait_seed(seed: UUID):
|
|||||||
if not generation:
|
if not generation:
|
||||||
return "Generation not found."
|
return "Generation not found."
|
||||||
elif generation.state == STATE_ERROR:
|
elif generation.state == STATE_ERROR:
|
||||||
return render_template("seedError.html", seed_error=generation.meta)
|
meta = json.loads(generation.meta)
|
||||||
|
details = json.dumps(meta, indent=4).strip()
|
||||||
|
return render_template("seedError.html", seed_error=meta["error"], details=details)
|
||||||
return render_template("waitSeed.html", seed_id=seed_id)
|
return render_template("waitSeed.html", seed_id=seed_id)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -3,10 +3,10 @@ import threading
|
|||||||
import json
|
import json
|
||||||
|
|
||||||
from Utils import local_path, user_path
|
from Utils import local_path, user_path
|
||||||
from worlds.alttp.Rom import Sprite
|
|
||||||
|
|
||||||
|
|
||||||
def update_sprites_lttp():
|
def update_sprites_lttp():
|
||||||
|
from worlds.alttp.Rom import Sprite
|
||||||
from tkinter import Tk
|
from tkinter import Tk
|
||||||
from LttPAdjuster import get_image_for_sprite
|
from LttPAdjuster import get_image_for_sprite
|
||||||
from LttPAdjuster import BackgroundTaskProgress
|
from LttPAdjuster import BackgroundTaskProgress
|
||||||
|
|||||||
90
WebHostLib/markdown.py
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
import re
|
||||||
|
from collections import Counter
|
||||||
|
|
||||||
|
import mistune
|
||||||
|
from werkzeug.utils import secure_filename
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"ImgUrlRewriteInlineParser",
|
||||||
|
'render_markdown',
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class ImgUrlRewriteInlineParser(mistune.InlineParser):
|
||||||
|
relative_url_base: str
|
||||||
|
|
||||||
|
def __init__(self, relative_url_base: str, hard_wrap: bool = False) -> None:
|
||||||
|
super().__init__(hard_wrap)
|
||||||
|
self.relative_url_base = relative_url_base
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _find_game_name_by_folder_name(name: str) -> str | None:
|
||||||
|
from worlds.AutoWorld import AutoWorldRegister
|
||||||
|
|
||||||
|
for world_name, world_type in AutoWorldRegister.world_types.items():
|
||||||
|
if world_type.__module__ == f"worlds.{name}":
|
||||||
|
return world_name
|
||||||
|
return None
|
||||||
|
|
||||||
|
def parse_link(self, m: re.Match[str], state: mistune.InlineState) -> int | None:
|
||||||
|
res = super().parse_link(m, state)
|
||||||
|
if res is not None and state.tokens and state.tokens[-1]["type"] == "image":
|
||||||
|
image_token = state.tokens[-1]
|
||||||
|
url: str = image_token["attrs"]["url"]
|
||||||
|
if not url.startswith("/") and "://" not in url:
|
||||||
|
# replace relative URL to another world's doc folder with the webhost folder layout
|
||||||
|
if url.startswith("../../") and "/docs/" in self.relative_url_base:
|
||||||
|
parts = url.split("/", 4)
|
||||||
|
if parts[2] != ".." and parts[3] == "docs":
|
||||||
|
game_name = self._find_game_name_by_folder_name(parts[2])
|
||||||
|
if game_name is not None:
|
||||||
|
url = "/".join(parts[1:2] + [secure_filename(game_name)] + parts[4:])
|
||||||
|
# change relative URL to point to deployment folder
|
||||||
|
url = f"{self.relative_url_base}/{url}"
|
||||||
|
image_token['attrs']['url'] = url
|
||||||
|
return res
|
||||||
|
|
||||||
|
|
||||||
|
def render_markdown(path: str, img_url_base: str | None = None) -> str:
|
||||||
|
markdown = mistune.create_markdown(
|
||||||
|
escape=False,
|
||||||
|
plugins=[
|
||||||
|
"strikethrough",
|
||||||
|
"footnotes",
|
||||||
|
"table",
|
||||||
|
"speedup",
|
||||||
|
],
|
||||||
|
)
|
||||||
|
|
||||||
|
heading_id_count: Counter[str] = Counter()
|
||||||
|
|
||||||
|
def heading_id(text: str) -> str:
|
||||||
|
nonlocal heading_id_count
|
||||||
|
|
||||||
|
# there is no good way to do this without regex
|
||||||
|
s = re.sub(r"[^\w\- ]", "", text.lower()).replace(" ", "-").strip("-")
|
||||||
|
n = heading_id_count[s]
|
||||||
|
heading_id_count[s] += 1
|
||||||
|
if n > 0:
|
||||||
|
s += f"-{n}"
|
||||||
|
return s
|
||||||
|
|
||||||
|
def id_hook(_: mistune.Markdown, state: mistune.BlockState) -> None:
|
||||||
|
for tok in state.tokens:
|
||||||
|
if tok["type"] == "heading" and tok["attrs"]["level"] < 4:
|
||||||
|
text = tok["text"]
|
||||||
|
assert isinstance(text, str)
|
||||||
|
unique_id = heading_id(text)
|
||||||
|
tok["attrs"]["id"] = unique_id
|
||||||
|
tok["text"] = f"<a href=\"#{unique_id}\">{text}</a>" # make header link to itself
|
||||||
|
|
||||||
|
markdown.before_render_hooks.append(id_hook)
|
||||||
|
if img_url_base:
|
||||||
|
markdown.inline = ImgUrlRewriteInlineParser(img_url_base)
|
||||||
|
|
||||||
|
with open(path, encoding="utf-8-sig") as f:
|
||||||
|
document = f.read()
|
||||||
|
html = markdown(document)
|
||||||
|
assert isinstance(html, str), "Unexpected mistune renderer in render_markdown"
|
||||||
|
return html
|
||||||
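For orientation, a minimal sketch of how the new render_markdown helper might be invoked; the path and URL base below are placeholders, not values from this diff, and the real call sites appear in the docs.py hunks that follow:

# Hypothetical call mirroring the docs.py call sites; paths are made-up examples.
from WebHostLib.markdown import render_markdown

html = render_markdown(
    "WebHostLib/static/generated/docs/ExampleGame/setup_en.md",  # assumed on-disk location
    img_url_base="/static/generated/docs/ExampleGame",           # base used to rewrite relative image URLs
)
# Headings up to level 3 come back with self-linking anchors, e.g. <h2 id="installation">...</h2>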
@@ -9,6 +9,7 @@ from werkzeug.utils import secure_filename
|
|||||||
|
|
||||||
from worlds.AutoWorld import AutoWorldRegister, World
|
from worlds.AutoWorld import AutoWorldRegister, World
|
||||||
from . import app, cache
|
from . import app, cache
|
||||||
|
from .markdown import render_markdown
|
||||||
from .models import Seed, Room, Command, UUID, uuid4
|
from .models import Seed, Room, Command, UUID, uuid4
|
||||||
from Utils import title_sorted
|
from Utils import title_sorted
|
||||||
|
|
||||||
@@ -27,49 +28,6 @@ def get_visible_worlds() -> dict[str, type(World)]:
|
|||||||
return worlds
|
return worlds
|
||||||
|
|
||||||
|
|
||||||
def render_markdown(path: str) -> str:
|
|
||||||
import mistune
|
|
||||||
from collections import Counter
|
|
||||||
|
|
||||||
markdown = mistune.create_markdown(
|
|
||||||
escape=False,
|
|
||||||
plugins=[
|
|
||||||
"strikethrough",
|
|
||||||
"footnotes",
|
|
||||||
"table",
|
|
||||||
"speedup",
|
|
||||||
],
|
|
||||||
)
|
|
||||||
|
|
||||||
heading_id_count: Counter[str] = Counter()
|
|
||||||
|
|
||||||
def heading_id(text: str) -> str:
|
|
||||||
nonlocal heading_id_count
|
|
||||||
import re # there is no good way to do this without regex
|
|
||||||
|
|
||||||
s = re.sub(r"[^\w\- ]", "", text.lower()).replace(" ", "-").strip("-")
|
|
||||||
n = heading_id_count[s]
|
|
||||||
heading_id_count[s] += 1
|
|
||||||
if n > 0:
|
|
||||||
s += f"-{n}"
|
|
||||||
return s
|
|
||||||
|
|
||||||
def id_hook(_: mistune.Markdown, state: mistune.BlockState) -> None:
|
|
||||||
for tok in state.tokens:
|
|
||||||
if tok["type"] == "heading" and tok["attrs"]["level"] < 4:
|
|
||||||
text = tok["text"]
|
|
||||||
assert isinstance(text, str)
|
|
||||||
unique_id = heading_id(text)
|
|
||||||
tok["attrs"]["id"] = unique_id
|
|
||||||
tok["text"] = f"<a href=\"#{unique_id}\">{text}</a>" # make header link to itself
|
|
||||||
|
|
||||||
markdown.before_render_hooks.append(id_hook)
|
|
||||||
|
|
||||||
with open(path, encoding="utf-8-sig") as f:
|
|
||||||
document = f.read()
|
|
||||||
return markdown(document)
|
|
||||||
|
|
||||||
|
|
||||||
@app.errorhandler(404)
|
@app.errorhandler(404)
|
||||||
@app.errorhandler(jinja2.exceptions.TemplateNotFound)
|
@app.errorhandler(jinja2.exceptions.TemplateNotFound)
|
||||||
def page_not_found(err):
|
def page_not_found(err):
|
||||||
@@ -91,10 +49,9 @@ def game_info(game, lang):
|
|||||||
theme = get_world_theme(game)
|
theme = get_world_theme(game)
|
||||||
secure_game_name = secure_filename(game)
|
secure_game_name = secure_filename(game)
|
||||||
lang = secure_filename(lang)
|
lang = secure_filename(lang)
|
||||||
document = render_markdown(os.path.join(
|
file_dir = os.path.join(app.static_folder, "generated", "docs", secure_game_name)
|
||||||
app.static_folder, "generated", "docs",
|
file_dir_url = url_for("static", filename=f"generated/docs/{secure_game_name}")
|
||||||
secure_game_name, f"{lang}_{secure_game_name}.md"
|
document = render_markdown(os.path.join(file_dir, f"{lang}_{secure_game_name}.md"), file_dir_url)
|
||||||
))
|
|
||||||
return render_template(
|
return render_template(
|
||||||
"markdown_document.html",
|
"markdown_document.html",
|
||||||
title=f"{game} Guide",
|
title=f"{game} Guide",
|
||||||
@@ -119,10 +76,9 @@ def tutorial(game: str, file: str):
|
|||||||
theme = get_world_theme(game)
|
theme = get_world_theme(game)
|
||||||
secure_game_name = secure_filename(game)
|
secure_game_name = secure_filename(game)
|
||||||
file = secure_filename(file)
|
file = secure_filename(file)
|
||||||
document = render_markdown(os.path.join(
|
file_dir = os.path.join(app.static_folder, "generated", "docs", secure_game_name)
|
||||||
app.static_folder, "generated", "docs",
|
file_dir_url = url_for("static", filename=f"generated/docs/{secure_game_name}")
|
||||||
secure_game_name, file+".md"
|
document = render_markdown(os.path.join(file_dir, f"{file}.md"), file_dir_url)
|
||||||
))
|
|
||||||
return render_template(
|
return render_template(
|
||||||
"markdown_document.html",
|
"markdown_document.html",
|
||||||
title=f"{game} Guide",
|
title=f"{game} Guide",
|
||||||
@@ -133,6 +89,15 @@ def tutorial(game: str, file: str):
|
|||||||
return abort(404)
|
return abort(404)
|
||||||
|
|
||||||
|
|
||||||
|
@app.route('/tutorial/<string:game>/<string:file>/<string:lang>')
|
||||||
|
def tutorial_redirect(game: str, file: str, lang: str):
|
||||||
|
"""
|
||||||
|
Permanently redirects old tutorial URLs to the new ones to keep search engines happy.
|
||||||
|
e.g. /tutorial/Archipelago/setup/en -> /tutorial/Archipelago/setup_en
|
||||||
|
"""
|
||||||
|
return redirect(url_for("tutorial", game=game, file=f"{file}_{lang}"), code=301)
|
||||||
|
|
||||||
|
|
||||||
@app.route('/tutorial/')
|
@app.route('/tutorial/')
|
||||||
@cache.cached()
|
@cache.cached()
|
||||||
def tutorial_landing():
|
def tutorial_landing():
|
||||||
@@ -251,7 +216,10 @@ def host_room(room: UUID):
|
|||||||
# indicate that the page should reload to get the assigned port
|
# indicate that the page should reload to get the assigned port
|
||||||
should_refresh = ((not room.last_port and now - room.creation_time < datetime.timedelta(seconds=3))
|
should_refresh = ((not room.last_port and now - room.creation_time < datetime.timedelta(seconds=3))
|
||||||
or room.last_activity < now - datetime.timedelta(seconds=room.timeout))
|
or room.last_activity < now - datetime.timedelta(seconds=room.timeout))
|
||||||
with db_session:
|
|
||||||
|
if now - room.last_activity > datetime.timedelta(minutes=1):
|
||||||
|
# we only set last_activity if needed, otherwise parallel access on /room will cause an internal server error
|
||||||
|
# due to "pony.orm.core.OptimisticCheckError: Object Room was updated outside of current transaction"
|
||||||
room.last_activity = now # will trigger a spinup, if it's not already running
|
room.last_activity = now # will trigger a spinup, if it's not already running
|
||||||
|
|
||||||
browser_tokens = "Mozilla", "Chrome", "Safari"
|
browser_tokens = "Mozilla", "Chrome", "Safari"
|
||||||
@@ -259,9 +227,9 @@ def host_room(room: UUID):
|
|||||||
or "Discordbot" in request.user_agent.string
|
or "Discordbot" in request.user_agent.string
|
||||||
or not any(browser_token in request.user_agent.string for browser_token in browser_tokens))
|
or not any(browser_token in request.user_agent.string for browser_token in browser_tokens))
|
||||||
|
|
||||||
def get_log(max_size: int = 0 if automated else 1024000) -> str:
|
def get_log(max_size: int = 0 if automated else 1024000) -> Tuple[str, int]:
|
||||||
if max_size == 0:
|
if max_size == 0:
|
||||||
return "…"
|
return "…", 0
|
||||||
try:
|
try:
|
||||||
with open(os.path.join("logs", str(room.id) + ".txt"), "rb") as log:
|
with open(os.path.join("logs", str(room.id) + ".txt"), "rb") as log:
|
||||||
raw_size = 0
|
raw_size = 0
|
||||||
@@ -272,9 +240,9 @@ def host_room(room: UUID):
|
|||||||
break
|
break
|
||||||
raw_size += len(block)
|
raw_size += len(block)
|
||||||
fragments.append(block.decode("utf-8"))
|
fragments.append(block.decode("utf-8"))
|
||||||
return "".join(fragments)
|
return "".join(fragments), raw_size
|
||||||
except FileNotFoundError:
|
except FileNotFoundError:
|
||||||
return ""
|
return "", 0
|
||||||
|
|
||||||
return render_template("hostRoom.html", room=room, should_refresh=should_refresh, get_log=get_log)
|
return render_template("hostRoom.html", room=room, should_refresh=should_refresh, get_log=get_log)
|
||||||
|
|
||||||
|
|||||||
@@ -76,7 +76,7 @@ def filter_rst_to_html(text: str) -> str:
|
|||||||
lines = text.splitlines()
|
lines = text.splitlines()
|
||||||
text = lines[0] + "\n" + dedent("\n".join(lines[1:]))
|
text = lines[0] + "\n" + dedent("\n".join(lines[1:]))
|
||||||
|
|
||||||
return publish_parts(text, writer_name='html', settings=None, settings_overrides={
|
return publish_parts(text, writer='html', settings=None, settings_overrides={
|
||||||
'raw_enable': False,
|
'raw_enable': False,
|
||||||
'file_insertion_enabled': False,
|
'file_insertion_enabled': False,
|
||||||
'output_encoding': 'unicode'
|
'output_encoding': 'unicode'
|
||||||
@@ -155,7 +155,9 @@ def generate_weighted_yaml(game: str):
|
|||||||
options = {}
|
options = {}
|
||||||
|
|
||||||
for key, val in request.form.items():
|
for key, val in request.form.items():
|
||||||
if "||" not in key:
|
if val == "_ensure-empty-list":
|
||||||
|
options[key] = {}
|
||||||
|
elif "||" not in key:
|
||||||
if len(str(val)) == 0:
|
if len(str(val)) == 0:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
@@ -212,8 +214,11 @@ def generate_yaml(game: str):
|
|||||||
if request.method == "POST":
|
if request.method == "POST":
|
||||||
options = {}
|
options = {}
|
||||||
intent_generate = False
|
intent_generate = False
|
||||||
|
|
||||||
for key, val in request.form.items(multi=True):
|
for key, val in request.form.items(multi=True):
|
||||||
if key in options:
|
if val == "_ensure-empty-list":
|
||||||
|
options[key] = []
|
||||||
|
elif options.get(key):
|
||||||
if not isinstance(options[key], list):
|
if not isinstance(options[key], list):
|
||||||
options[key] = [options[key]]
|
options[key] = [options[key]]
|
||||||
options[key].append(val)
|
options[key].append(val)
|
||||||
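To make the intent of the "_ensure-empty-list" sentinel concrete, here is a hedged sketch of how a POST with nothing ticked flows through the loop above; the form payload and option name are hypothetical:

# Sketch: the hidden input guarantees the key is submitted even when nothing is selected.
form_items = [("local_items", "_ensure-empty-list")]  # hypothetical form payload

options = {}
for key, val in form_items:
    if val == "_ensure-empty-list":
        options[key] = []  # the option survives as an explicit empty list
    # ...remaining branches as in generate_yaml above

assert options == {"local_items": []}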
@@ -226,7 +231,7 @@ def generate_yaml(game: str):
|
|||||||
if key_parts[-1] == "qty":
|
if key_parts[-1] == "qty":
|
||||||
if key_parts[0] not in options:
|
if key_parts[0] not in options:
|
||||||
options[key_parts[0]] = {}
|
options[key_parts[0]] = {}
|
||||||
if val != "0":
|
if val and val != "0":
|
||||||
options[key_parts[0]][key_parts[1]] = int(val)
|
options[key_parts[0]][key_parts[1]] = int(val)
|
||||||
del options[key]
|
del options[key]
|
||||||
|
|
||||||
|
|||||||
@@ -1,11 +1,13 @@
|
|||||||
flask>=3.1.1
|
flask>=3.1.1
|
||||||
werkzeug>=3.1.3
|
werkzeug>=3.1.3
|
||||||
pony>=0.7.19
|
pony>=0.7.19; python_version <= '3.12'
|
||||||
|
pony @ git+https://github.com/black-sliver/pony@7feb1221953b7fa4a6735466bf21a8b4d35e33ba#0.7.19; python_version >= '3.13'
|
||||||
waitress>=3.0.2
|
waitress>=3.0.2
|
||||||
Flask-Caching>=2.3.0
|
Flask-Caching>=2.3.0
|
||||||
Flask-Compress>=1.17
|
Flask-Compress==1.18 # pkg_resources can't resolve the "backports.zstd" dependency of >1.18, breaking ModuleUpdate.py
|
||||||
Flask-Limiter>=3.12
|
Flask-Limiter>=3.12
|
||||||
bokeh>=3.6.3
|
bokeh>=3.6.3
|
||||||
markupsafe>=3.0.2
|
markupsafe>=3.0.2
|
||||||
setproctitle>=1.3.5
|
setproctitle>=1.3.5
|
||||||
mistune>=3.1.3
|
mistune>=3.1.3
|
||||||
|
docutils>=0.22.2
|
||||||
|
|||||||
@@ -66,7 +66,7 @@ is to ensure items necessary to complete the game will be accessible to the play
|
|||||||
rules allowing certain items to be placed in normally unreachable locations, provided the player has indicated they are
|
rules allowing certain items to be placed in normally unreachable locations, provided the player has indicated they are
|
||||||
comfortable exploiting certain glitches in the game.
|
comfortable exploiting certain glitches in the game.
|
||||||
|
|
||||||
## I want to add a game to the Archipelago randomizer. How do I do that?
|
## I want to develop a game implementation for Archipelago. How do I do that?
|
||||||
|
|
||||||
The best way to get started is to take a look at our code on GitHub:
|
The best way to get started is to take a look at our code on GitHub:
|
||||||
[Archipelago GitHub Page](https://github.com/ArchipelagoMW/Archipelago).
|
[Archipelago GitHub Page](https://github.com/ArchipelagoMW/Archipelago).
|
||||||
@@ -77,4 +77,5 @@ There, you will find examples of games in the `worlds` folder:
|
|||||||
You may also find developer documentation in the `docs` folder:
|
You may also find developer documentation in the `docs` folder:
|
||||||
[/docs Folder in Archipelago Code](https://github.com/ArchipelagoMW/Archipelago/tree/main/docs).
|
[/docs Folder in Archipelago Code](https://github.com/ArchipelagoMW/Archipelago/tree/main/docs).
|
||||||
|
|
||||||
If you have more questions, feel free to ask in the **#ap-world-dev** channel on our Discord.
|
If you have more questions regarding development of a game implementation, feel free to ask in the **#ap-world-dev**
|
||||||
|
channel on our Discord.
|
||||||
|
|||||||
@@ -1,49 +1,43 @@
|
|||||||
|
let updateSection = (sectionName, fakeDOM) => {
|
||||||
|
document.getElementById(sectionName).innerHTML = fakeDOM.getElementById(sectionName).innerHTML;
|
||||||
|
}
|
||||||
|
|
||||||
window.addEventListener('load', () => {
|
window.addEventListener('load', () => {
|
||||||
// Reload tracker every 15 seconds
|
// Reload tracker every 60 seconds (sync'd)
|
||||||
const url = window.location;
|
const url = window.location;
|
||||||
setInterval(() => {
|
// Note: This synchronization code is adapted from code in trackerCommon.js
|
||||||
const ajax = new XMLHttpRequest();
|
const targetSecond = parseInt(document.getElementById('player-tracker').getAttribute('data-second')) + 3;
|
||||||
ajax.onreadystatechange = () => {
|
console.log("Target second of refresh: " + targetSecond);
|
||||||
if (ajax.readyState !== 4) { return; }
|
|
||||||
|
|
||||||
// Create a fake DOM using the returned HTML
|
let getSleepTimeSeconds = () => {
|
||||||
const domParser = new DOMParser();
|
// JS remainder keeps the sign (-40 % 60 === -40), so add 60 and take % 60 again to stay in [0, 60)
|
||||||
const fakeDOM = domParser.parseFromString(ajax.responseText, 'text/html');
|
const sleepSeconds = (((targetSecond - new Date().getSeconds()) % 60) + 60) % 60;
|
||||||
|
return sleepSeconds || 60;
|
||||||
// Update item tracker
|
|
||||||
document.getElementById('inventory-table').innerHTML = fakeDOM.getElementById('inventory-table').innerHTML;
|
|
||||||
// Update only counters in the location-table
|
|
||||||
let counters = document.getElementsByClassName('counter');
|
|
||||||
const fakeCounters = fakeDOM.getElementsByClassName('counter');
|
|
||||||
for (let i = 0; i < counters.length; i++) {
|
|
||||||
counters[i].innerHTML = fakeCounters[i].innerHTML;
|
|
||||||
}
|
|
||||||
};
|
};
|
||||||
ajax.open('GET', url);
|
|
||||||
ajax.send();
|
|
||||||
}, 15000)
|
|
||||||
|
|
||||||
// Collapsible advancement sections
|
let updateTracker = () => {
|
||||||
const categories = document.getElementsByClassName("location-category");
|
const ajax = new XMLHttpRequest();
|
||||||
for (let category of categories) {
|
ajax.onreadystatechange = () => {
|
||||||
let hide_id = category.id.split('_')[0];
|
if (ajax.readyState !== 4) { return; }
|
||||||
if (hide_id === 'Total') {
|
|
||||||
continue;
|
// Create a fake DOM using the returned HTML
|
||||||
}
|
const domParser = new DOMParser();
|
||||||
category.addEventListener('click', function() {
|
const fakeDOM = domParser.parseFromString(ajax.responseText, 'text/html');
|
||||||
// Toggle the advancement list
|
|
||||||
document.getElementById(hide_id).classList.toggle("hide");
|
// Update dynamic sections
|
||||||
// Change text of the header
|
updateSection('player-info', fakeDOM);
|
||||||
const tab_header = document.getElementById(hide_id+'_header').children[0];
|
updateSection('section-filler', fakeDOM);
|
||||||
const orig_text = tab_header.innerHTML;
|
updateSection('section-terran', fakeDOM);
|
||||||
let new_text;
|
updateSection('section-zerg', fakeDOM);
|
||||||
if (orig_text.includes("▼")) {
|
updateSection('section-protoss', fakeDOM);
|
||||||
new_text = orig_text.replace("▼", "▲");
|
updateSection('section-nova', fakeDOM);
|
||||||
}
|
updateSection('section-kerrigan', fakeDOM);
|
||||||
else {
|
updateSection('section-keys', fakeDOM);
|
||||||
new_text = orig_text.replace("▲", "▼");
|
updateSection('section-locations', fakeDOM);
|
||||||
}
|
};
|
||||||
tab_header.innerHTML = new_text;
|
ajax.open('GET', url);
|
||||||
});
|
ajax.send();
|
||||||
}
|
updater = setTimeout(updateTracker, getSleepTimeSeconds() * 1000);
|
||||||
|
};
|
||||||
|
window.updater = setTimeout(updateTracker, getSleepTimeSeconds() * 1000);
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -28,7 +28,6 @@
|
|||||||
font-weight: normal;
|
font-weight: normal;
|
||||||
font-family: LondrinaSolid-Regular, sans-serif;
|
font-family: LondrinaSolid-Regular, sans-serif;
|
||||||
text-transform: uppercase;
|
text-transform: uppercase;
|
||||||
cursor: pointer; /* TODO: remove once we drop showdown.js */
|
|
||||||
width: 100%;
|
width: 100%;
|
||||||
text-shadow: 1px 1px 4px #000000;
|
text-shadow: 1px 1px 4px #000000;
|
||||||
}
|
}
|
||||||
@@ -37,7 +36,6 @@
|
|||||||
font-size: 38px;
|
font-size: 38px;
|
||||||
font-weight: normal;
|
font-weight: normal;
|
||||||
font-family: LondrinaSolid-Light, sans-serif;
|
font-family: LondrinaSolid-Light, sans-serif;
|
||||||
cursor: pointer; /* TODO: remove once we drop showdown.js */
|
|
||||||
width: 100%;
|
width: 100%;
|
||||||
margin-top: 20px;
|
margin-top: 20px;
|
||||||
margin-bottom: 0.5rem;
|
margin-bottom: 0.5rem;
|
||||||
@@ -50,7 +48,6 @@
|
|||||||
font-family: LexendDeca-Regular, sans-serif;
|
font-family: LexendDeca-Regular, sans-serif;
|
||||||
text-transform: none;
|
text-transform: none;
|
||||||
text-align: left;
|
text-align: left;
|
||||||
cursor: pointer; /* TODO: remove once we drop showdown.js */
|
|
||||||
width: 100%;
|
width: 100%;
|
||||||
margin-bottom: 0.5rem;
|
margin-bottom: 0.5rem;
|
||||||
}
|
}
|
||||||
@@ -59,7 +56,6 @@
|
|||||||
font-family: LexendDeca-Regular, sans-serif;
|
font-family: LexendDeca-Regular, sans-serif;
|
||||||
text-transform: none;
|
text-transform: none;
|
||||||
font-size: 24px;
|
font-size: 24px;
|
||||||
cursor: pointer; /* TODO: remove once we drop showdown.js */
|
|
||||||
margin-bottom: 24px;
|
margin-bottom: 24px;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -67,14 +63,12 @@
|
|||||||
font-family: LexendDeca-Regular, sans-serif;
|
font-family: LexendDeca-Regular, sans-serif;
|
||||||
text-transform: none;
|
text-transform: none;
|
||||||
font-size: 22px;
|
font-size: 22px;
|
||||||
cursor: pointer; /* TODO: remove once we drop showdown.js */
|
|
||||||
}
|
}
|
||||||
|
|
||||||
.markdown h6, .markdown details summary.h6{
|
.markdown h6, .markdown details summary.h6{
|
||||||
font-family: LexendDeca-Regular, sans-serif;
|
font-family: LexendDeca-Regular, sans-serif;
|
||||||
text-transform: none;
|
text-transform: none;
|
||||||
font-size: 20px;
|
font-size: 20px;
|
||||||
cursor: pointer; /* TODO: remove once we drop showdown.js */
|
|
||||||
}
|
}
|
||||||
|
|
||||||
.markdown h4, .markdown h5, .markdown h6{
|
.markdown h4, .markdown h5, .markdown h6{
|
||||||
|
|||||||
@@ -1,160 +1,279 @@
|
|||||||
#player-tracker-wrapper{
|
*{
|
||||||
margin: 0;
|
margin: 0;
|
||||||
|
font-family: "JuraBook", monospace;
|
||||||
|
}
|
||||||
|
body{
|
||||||
|
--icon-size: 36px;
|
||||||
|
--item-class-padding: 4px;
|
||||||
|
}
|
||||||
|
a{
|
||||||
|
color: #1ae;
|
||||||
}
|
}
|
||||||
|
|
||||||
#tracker-table td {
|
/* Section colours */
|
||||||
vertical-align: top;
|
#player-info{
|
||||||
|
background-color: #37a;
|
||||||
|
}
|
||||||
|
.player-tracker{
|
||||||
|
max-width: 100%;
|
||||||
|
}
|
||||||
|
.tracker-section{
|
||||||
|
background-color: grey;
|
||||||
|
}
|
||||||
|
#terran-items{
|
||||||
|
background-color: #3a7;
|
||||||
|
}
|
||||||
|
#zerg-items{
|
||||||
|
background-color: #d94;
|
||||||
|
}
|
||||||
|
#protoss-items{
|
||||||
|
background-color: #37a;
|
||||||
|
}
|
||||||
|
#nova-items{
|
||||||
|
background-color: #777;
|
||||||
|
}
|
||||||
|
#kerrigan-items{
|
||||||
|
background-color: #a37;
|
||||||
|
}
|
||||||
|
#keys{
|
||||||
|
background-color: #aa2;
|
||||||
}
|
}
|
||||||
|
|
||||||
.inventory-table-area{
|
/* Sections */
|
||||||
border: 2px solid #000000;
|
.section-body{
|
||||||
border-radius: 4px;
|
display: flex;
|
||||||
padding: 3px 10px 3px 10px;
|
flex-flow: row wrap;
|
||||||
|
justify-content: flex-start;
|
||||||
|
align-items: flex-start;
|
||||||
|
padding-bottom: 3px;
|
||||||
|
}
|
||||||
|
.section-body-2{
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
}
|
||||||
|
.tracker-section:has(input.collapse-section[type=checkbox]:checked) .section-body,
|
||||||
|
.tracker-section:has(input.collapse-section[type=checkbox]:checked) .section-body-2{
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
.section-title{
|
||||||
|
position: relative;
|
||||||
|
border-bottom: 3px solid black;
|
||||||
|
/* Prevent text selection */
|
||||||
|
user-select: none;
|
||||||
|
-webkit-user-select: none;
|
||||||
|
-ms-user-select: none;
|
||||||
|
}
|
||||||
|
input[type="checkbox"]{
|
||||||
|
position: absolute;
|
||||||
|
cursor: pointer;
|
||||||
|
opacity: 0;
|
||||||
|
z-index: 1;
|
||||||
|
width: 100%;
|
||||||
|
height: 100%;
|
||||||
|
}
|
||||||
|
.section-title:hover h2{
|
||||||
|
text-shadow: 0 0 4px #ddd;
|
||||||
|
}
|
||||||
|
.f {
|
||||||
|
display: flex;
|
||||||
|
overflow: hidden;
|
||||||
}
|
}
|
||||||
|
|
||||||
.inventory-table-area:has(.inventory-table-terran) {
|
/* Acquire item filters */
|
||||||
width: 690px;
|
.tracker-section img{
|
||||||
background-color: #525494;
|
height: 100%;
|
||||||
|
width: var(--icon-size);
|
||||||
|
height: var(--icon-size);
|
||||||
|
background-color: black;
|
||||||
|
}
|
||||||
|
.unacquired, .lvl-0 .f{
|
||||||
|
filter: grayscale(100%) contrast(80%) brightness(42%) blur(0.5px);
|
||||||
|
}
|
||||||
|
.spacer{
|
||||||
|
width: var(--icon-size);
|
||||||
|
height: var(--icon-size);
|
||||||
}
|
}
|
||||||
|
|
||||||
.inventory-table-area:has(.inventory-table-zerg) {
|
/* Item groups */
|
||||||
width: 360px;
|
.item-class{
|
||||||
background-color: #9d60d2;
|
display: flex;
|
||||||
|
flex-flow: column;
|
||||||
|
justify-content: center;
|
||||||
|
padding: var(--item-class-padding);
|
||||||
|
}
|
||||||
|
.item-class-header{
|
||||||
|
display: flex;
|
||||||
|
flex-flow: row;
|
||||||
|
}
|
||||||
|
.item-class-upgrades{
|
||||||
|
/* Note: {display: flex; flex-flow: column wrap} */
|
||||||
|
/* just breaks on Firefox (width does not scale to content) */
|
||||||
|
display: grid;
|
||||||
|
grid-template-rows: repeat(4, auto);
|
||||||
|
grid-auto-flow: column;
|
||||||
}
|
}
|
||||||
|
|
||||||
.inventory-table-area:has(.inventory-table-protoss) {
|
/* Subsections */
|
||||||
width: 400px;
|
.section-toc{
|
||||||
background-color: #d2b260;
|
display: flex;
|
||||||
|
flex-direction: row;
|
||||||
|
}
|
||||||
|
.toc-box{
|
||||||
|
position: relative;
|
||||||
|
padding-left: 15px;
|
||||||
|
padding-right: 15px;
|
||||||
|
}
|
||||||
|
.toc-box:hover{
|
||||||
|
text-shadow: 0 0 7px white;
|
||||||
|
}
|
||||||
|
.ss-header{
|
||||||
|
position: relative;
|
||||||
|
text-align: center;
|
||||||
|
writing-mode: sideways-lr;
|
||||||
|
user-select: none;
|
||||||
|
padding-top: 5px;
|
||||||
|
font-size: 115%;
|
||||||
|
}
|
||||||
|
.tracker-section:has(input.ss-1-toggle:checked) .ss-1{
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
.tracker-section:has(input.ss-2-toggle:checked) .ss-2{
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
.tracker-section:has(input.ss-3-toggle:checked) .ss-3{
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
.tracker-section:has(input.ss-4-toggle:checked) .ss-4{
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
.tracker-section:has(input.ss-5-toggle:checked) .ss-5{
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
.tracker-section:has(input.ss-6-toggle:checked) .ss-6{
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
.tracker-section:has(input.ss-7-toggle:checked) .ss-7{
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
.tracker-section:has(input.ss-1-toggle:hover) .ss-1{
|
||||||
|
background-color: #fff5;
|
||||||
|
box-shadow: 0 0 1px 1px white;
|
||||||
|
}
|
||||||
|
.tracker-section:has(input.ss-2-toggle:hover) .ss-2{
|
||||||
|
background-color: #fff5;
|
||||||
|
box-shadow: 0 0 1px 1px white;
|
||||||
|
}
|
||||||
|
.tracker-section:has(input.ss-3-toggle:hover) .ss-3{
|
||||||
|
background-color: #fff5;
|
||||||
|
box-shadow: 0 0 1px 1px white;
|
||||||
|
}
|
||||||
|
.tracker-section:has(input.ss-4-toggle:hover) .ss-4{
|
||||||
|
background-color: #fff5;
|
||||||
|
box-shadow: 0 0 1px 1px white;
|
||||||
|
}
|
||||||
|
.tracker-section:has(input.ss-5-toggle:hover) .ss-5{
|
||||||
|
background-color: #fff5;
|
||||||
|
box-shadow: 0 0 1px 1px white;
|
||||||
|
}
|
||||||
|
.tracker-section:has(input.ss-6-toggle:hover) .ss-6{
|
||||||
|
background-color: #fff5;
|
||||||
|
box-shadow: 0 0 1px 1px white;
|
||||||
|
}
|
||||||
|
.tracker-section:has(input.ss-7-toggle:hover) .ss-7{
|
||||||
|
background-color: #fff5;
|
||||||
|
box-shadow: 0 0 1px 1px white;
|
||||||
}
|
}
|
||||||
|
|
||||||
#tracker-table .inventory-table td{
|
/* Progressive items */
|
||||||
width: 40px;
|
.progressive{
|
||||||
height: 40px;
|
max-height: var(--icon-size);
|
||||||
text-align: center;
|
display: contents;
|
||||||
vertical-align: middle;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
.inventory-table td.title{
|
.lvl-0 > :nth-child(2),
|
||||||
padding-top: 10px;
|
.lvl-0 > :nth-child(3),
|
||||||
height: 20px;
|
.lvl-0 > :nth-child(4),
|
||||||
font-family: "JuraBook", monospace;
|
.lvl-0 > :nth-child(5){
|
||||||
font-size: 16px;
|
display: none;
|
||||||
font-weight: bold;
|
}
|
||||||
|
.lvl-1 > :nth-child(2),
|
||||||
|
.lvl-1 > :nth-child(3),
|
||||||
|
.lvl-1 > :nth-child(4),
|
||||||
|
.lvl-1 > :nth-child(5){
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
.lvl-2 > :nth-child(1),
|
||||||
|
.lvl-2 > :nth-child(3),
|
||||||
|
.lvl-2 > :nth-child(4),
|
||||||
|
.lvl-2 > :nth-child(5){
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
.lvl-3 > :nth-child(1),
|
||||||
|
.lvl-3 > :nth-child(2),
|
||||||
|
.lvl-3 > :nth-child(4),
|
||||||
|
.lvl-3 > :nth-child(5){
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
.lvl-4 > :nth-child(1),
|
||||||
|
.lvl-4 > :nth-child(2),
|
||||||
|
.lvl-4 > :nth-child(3),
|
||||||
|
.lvl-4 > :nth-child(5){
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
.lvl-5 > :nth-child(1),
|
||||||
|
.lvl-5 > :nth-child(2),
|
||||||
|
.lvl-5 > :nth-child(3),
|
||||||
|
.lvl-5 > :nth-child(4){
|
||||||
|
display: none;
|
||||||
}
|
}
|
||||||
|
|
||||||
.inventory-table img{
|
/* Filler item counters */
|
||||||
height: 100%;
|
.item-counter{
|
||||||
max-width: 40px;
|
display: table;
|
||||||
max-height: 40px;
|
text-align: center;
|
||||||
border: 1px solid #000000;
|
padding: var(--item-class-padding);
|
||||||
filter: grayscale(100%) contrast(75%) brightness(20%);
|
}
|
||||||
background-color: black;
|
.item-count{
|
||||||
|
display: table-cell;
|
||||||
|
vertical-align: middle;
|
||||||
|
padding-left: 3px;
|
||||||
|
padding-right: 15px;
|
||||||
}
|
}
|
||||||
|
|
||||||
.inventory-table img.acquired{
|
/* Hidden items */
|
||||||
filter: none;
|
.hidden-class:not(:has(img.acquired)){
|
||||||
background-color: black;
|
display: none;
|
||||||
|
}
|
||||||
|
.hidden-item:not(.acquired){
|
||||||
|
display:none;
|
||||||
}
|
}
|
||||||
|
|
||||||
.inventory-table .tint-terran img.acquired {
|
/* Keys */
|
||||||
filter: sepia(100%) saturate(300%) brightness(130%) hue-rotate(120deg)
|
#keys ol, #keys ul{
|
||||||
|
columns: 3;
|
||||||
|
-webkit-columns: 3;
|
||||||
|
-moz-columns: 3;
|
||||||
|
}
|
||||||
|
#keys li{
|
||||||
|
padding-right: 15pt;
|
||||||
}
|
}
|
||||||
|
|
||||||
.inventory-table .tint-protoss img.acquired {
|
/* Locations */
|
||||||
filter: sepia(100%) saturate(1000%) brightness(110%) hue-rotate(180deg)
|
#section-locations{
|
||||||
|
padding-left: 5px;
|
||||||
|
}
|
||||||
|
@media only screen and (min-width: 120ch){
|
||||||
|
#section-locations ul{
|
||||||
|
columns: 2;
|
||||||
|
-webkit-columns: 2;
|
||||||
|
-moz-columns: 2;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
#locations li.checked{
|
||||||
|
list-style-type: "✔ ";
|
||||||
}
|
}
|
||||||
|
|
||||||
.inventory-table .tint-level-1 img.acquired {
|
/* Allowing scrolling down a little further */
|
||||||
filter: sepia(100%) saturate(1000%) brightness(110%) hue-rotate(60deg)
|
.bottom-padding{
|
||||||
}
|
min-height: 33vh;
|
||||||
|
|
||||||
.inventory-table .tint-level-2 img.acquired {
|
|
||||||
filter: sepia(100%) saturate(1000%) brightness(110%) hue-rotate(60deg) hue-rotate(120deg)
|
|
||||||
}
|
|
||||||
|
|
||||||
.inventory-table .tint-level-3 img.acquired {
|
|
||||||
filter: sepia(100%) saturate(1000%) brightness(110%) hue-rotate(60deg) hue-rotate(240deg)
|
|
||||||
}
|
|
||||||
|
|
||||||
.inventory-table div.counted-item {
|
|
||||||
position: relative;
|
|
||||||
}
|
|
||||||
|
|
||||||
.inventory-table div.item-count {
|
|
||||||
width: 160px;
|
|
||||||
text-align: left;
|
|
||||||
color: black;
|
|
||||||
font-family: "JuraBook", monospace;
|
|
||||||
font-weight: bold;
|
|
||||||
}
|
|
||||||
|
|
||||||
#location-table{
|
|
||||||
border: 2px solid #000000;
|
|
||||||
border-radius: 4px;
|
|
||||||
background-color: #87b678;
|
|
||||||
padding: 10px 3px 3px;
|
|
||||||
font-family: "JuraBook", monospace;
|
|
||||||
font-size: 16px;
|
|
||||||
font-weight: bold;
|
|
||||||
cursor: default;
|
|
||||||
}
|
|
||||||
|
|
||||||
#location-table table{
|
|
||||||
width: 100%;
|
|
||||||
}
|
|
||||||
|
|
||||||
#location-table th{
|
|
||||||
vertical-align: middle;
|
|
||||||
text-align: left;
|
|
||||||
padding-right: 10px;
|
|
||||||
}
|
|
||||||
|
|
||||||
#location-table td{
|
|
||||||
padding-top: 2px;
|
|
||||||
padding-bottom: 2px;
|
|
||||||
line-height: 20px;
|
|
||||||
}
|
|
||||||
|
|
||||||
#location-table td.counter {
|
|
||||||
text-align: right;
|
|
||||||
font-size: 14px;
|
|
||||||
}
|
|
||||||
|
|
||||||
#location-table td.toggle-arrow {
|
|
||||||
text-align: right;
|
|
||||||
}
|
|
||||||
|
|
||||||
#location-table tr#Total-header {
|
|
||||||
font-weight: bold;
|
|
||||||
}
|
|
||||||
|
|
||||||
#location-table img{
|
|
||||||
height: 100%;
|
|
||||||
max-width: 30px;
|
|
||||||
max-height: 30px;
|
|
||||||
}
|
|
||||||
|
|
||||||
#location-table tbody.locations {
|
|
||||||
font-size: 16px;
|
|
||||||
}
|
|
||||||
|
|
||||||
#location-table td.location-name {
|
|
||||||
padding-left: 16px;
|
|
||||||
}
|
|
||||||
|
|
||||||
#location-table td:has(.location-column) {
|
|
||||||
vertical-align: top;
|
|
||||||
}
|
|
||||||
|
|
||||||
#location-table .location-column {
|
|
||||||
width: 100%;
|
|
||||||
height: 100%;
|
|
||||||
}
|
|
||||||
|
|
||||||
#location-table .location-column .spacer {
|
|
||||||
min-height: 24px;
|
|
||||||
}
|
|
||||||
|
|
||||||
.hide {
|
|
||||||
display: none;
|
|
||||||
}
|
}
|
||||||
3965
WebHostLib/static/styles/sc2TrackerAtlas.css
Normal file
File diff suppressed because it is too large
@@ -72,3 +72,13 @@ code{
|
|||||||
padding-right: 0.25rem;
|
padding-right: 0.25rem;
|
||||||
color: #000000;
|
color: #000000;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
code.grassy {
|
||||||
|
background-color: #b5e9a4;
|
||||||
|
border: 1px solid #2a6c2f;
|
||||||
|
white-space: preserve;
|
||||||
|
text-align: left;
|
||||||
|
display: block;
|
||||||
|
font-size: 14px;
|
||||||
|
line-height: 20px;
|
||||||
|
}
|
||||||
|
|||||||
@@ -13,3 +13,7 @@
|
|||||||
min-height: 360px;
|
min-height: 360px;
|
||||||
text-align: center;
|
text-align: center;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
h2, h4 {
|
||||||
|
color: #ffffff;
|
||||||
|
}
|
||||||
|
|||||||
@@ -98,7 +98,7 @@
|
|||||||
<td>
|
<td>
|
||||||
{% if hint.finding_player == player %}
|
{% if hint.finding_player == player %}
|
||||||
<b>{{ player_names_with_alias[(team, hint.finding_player)] }}</b>
|
<b>{{ player_names_with_alias[(team, hint.finding_player)] }}</b>
|
||||||
{% elif get_slot_info(team, hint.finding_player).type == 2 %}
|
{% elif get_slot_info(hint.finding_player).type == 2 %}
|
||||||
<i>{{ player_names_with_alias[(team, hint.finding_player)] }}</i>
|
<i>{{ player_names_with_alias[(team, hint.finding_player)] }}</i>
|
||||||
{% else %}
|
{% else %}
|
||||||
<a href="{{ url_for("get_player_tracker", tracker=room.tracker, tracked_team=team, tracked_player=hint.finding_player) }}">
|
<a href="{{ url_for("get_player_tracker", tracker=room.tracker, tracked_team=team, tracked_player=hint.finding_player) }}">
|
||||||
@@ -109,7 +109,7 @@
|
|||||||
<td>
|
<td>
|
||||||
{% if hint.receiving_player == player %}
|
{% if hint.receiving_player == player %}
|
||||||
<b>{{ player_names_with_alias[(team, hint.receiving_player)] }}</b>
|
<b>{{ player_names_with_alias[(team, hint.receiving_player)] }}</b>
|
||||||
{% elif get_slot_info(team, hint.receiving_player).type == 2 %}
|
{% elif get_slot_info(hint.receiving_player).type == 2 %}
|
||||||
<i>{{ player_names_with_alias[(team, hint.receiving_player)] }}</i>
|
<i>{{ player_names_with_alias[(team, hint.receiving_player)] }}</i>
|
||||||
{% else %}
|
{% else %}
|
||||||
<a href="{{ url_for("get_player_tracker", tracker=room.tracker, tracked_team=team, tracked_player=hint.receiving_player) }}">
|
<a href="{{ url_for("get_player_tracker", tracker=room.tracker, tracked_team=team, tracked_player=hint.receiving_player) }}">
|
||||||
|
|||||||
@@ -58,8 +58,7 @@
|
|||||||
Open Log File...
|
Open Log File...
|
||||||
</a>
|
</a>
|
||||||
</div>
|
</div>
|
||||||
{% set log = get_log() -%}
|
{% set log, log_len = get_log() -%}
|
||||||
{%- set log_len = log | length - 1 if log.endswith("…") else log | length -%}
|
|
||||||
<div id="logger" style="white-space: pre">{{ log }}</div>
|
<div id="logger" style="white-space: pre">{{ log }}</div>
|
||||||
<script>
|
<script>
|
||||||
let url = '{{ url_for('display_log', room = room.id) }}';
|
let url = '{{ url_for('display_log', room = room.id) }}';
|
||||||
|
|||||||
@@ -45,15 +45,15 @@
|
|||||||
{%- set current_sphere = loop.index %}
|
{%- set current_sphere = loop.index %}
|
||||||
{%- for player, sphere_location_ids in sphere.items() %}
|
{%- for player, sphere_location_ids in sphere.items() %}
|
||||||
{%- set checked_locations = tracker_data.get_player_checked_locations(team, player) %}
|
{%- set checked_locations = tracker_data.get_player_checked_locations(team, player) %}
|
||||||
{%- set finder_game = tracker_data.get_player_game(team, player) %}
|
{%- set finder_game = tracker_data.get_player_game(player) %}
|
||||||
{%- set player_location_data = tracker_data.get_player_locations(team, player) %}
|
{%- set player_location_data = tracker_data.get_player_locations(player) %}
|
||||||
{%- for location_id in sphere_location_ids.intersection(checked_locations) %}
|
{%- for location_id in sphere_location_ids.intersection(checked_locations) %}
|
||||||
<tr>
|
<tr>
|
||||||
{%- set item_id, receiver, item_flags = player_location_data[location_id] %}
|
{%- set item_id, receiver, item_flags = player_location_data[location_id] %}
|
||||||
{%- set receiver_game = tracker_data.get_player_game(team, receiver) %}
|
{%- set receiver_game = tracker_data.get_player_game(receiver) %}
|
||||||
<td>{{ current_sphere }}</td>
|
<td>{{ current_sphere }}</td>
|
||||||
<td>{{ tracker_data.get_player_name(team, player) }}</td>
|
<td>{{ tracker_data.get_player_name(player) }}</td>
|
||||||
<td>{{ tracker_data.get_player_name(team, receiver) }}</td>
|
<td>{{ tracker_data.get_player_name(receiver) }}</td>
|
||||||
<td>{{ tracker_data.item_id_to_name[receiver_game][item_id] }}</td>
|
<td>{{ tracker_data.item_id_to_name[receiver_game][item_id] }}</td>
|
||||||
<td>{{ tracker_data.location_id_to_name[finder_game][location_id] }}</td>
|
<td>{{ tracker_data.location_id_to_name[finder_game][location_id] }}</td>
|
||||||
<td>{{ finder_game }}</td>
|
<td>{{ finder_game }}</td>
|
||||||
|
|||||||
@@ -22,14 +22,14 @@
|
|||||||
-%}
|
-%}
|
||||||
<tr>
|
<tr>
|
||||||
<td>
|
<td>
|
||||||
{% if get_slot_info(team, hint.finding_player).type == 2 %}
|
{% if get_slot_info(hint.finding_player).type == 2 %}
|
||||||
<i>{{ player_names_with_alias[(team, hint.finding_player)] }}</i>
|
<i>{{ player_names_with_alias[(team, hint.finding_player)] }}</i>
|
||||||
{% else %}
|
{% else %}
|
||||||
{{ player_names_with_alias[(team, hint.finding_player)] }}
|
{{ player_names_with_alias[(team, hint.finding_player)] }}
|
||||||
{% endif %}
|
{% endif %}
|
||||||
</td>
|
</td>
|
||||||
<td>
|
<td>
|
||||||
{% if get_slot_info(team, hint.receiving_player).type == 2 %}
|
{% if get_slot_info(hint.receiving_player).type == 2 %}
|
||||||
<i>{{ player_names_with_alias[(team, hint.receiving_player)] }}</i>
|
<i>{{ player_names_with_alias[(team, hint.receiving_player)] }}</i>
|
||||||
{% else %}
|
{% else %}
|
||||||
{{ player_names_with_alias[(team, hint.receiving_player)] }}
|
{{ player_names_with_alias[(team, hint.receiving_player)] }}
|
||||||
|
|||||||
@@ -134,6 +134,7 @@
|
|||||||
|
|
||||||
{% macro OptionList(option_name, option) %}
|
{% macro OptionList(option_name, option) %}
|
||||||
{{ OptionTitle(option_name, option) }}
|
{{ OptionTitle(option_name, option) }}
|
||||||
|
<input type="hidden" id="{{ option_name }}-{{ key }}-hidden" name="{{ option_name }}" value="_ensure-empty-list"/>
|
||||||
<div class="option-container">
|
<div class="option-container">
|
||||||
{% for key in (option.valid_keys if option.valid_keys is ordered else option.valid_keys|sort) %}
|
{% for key in (option.valid_keys if option.valid_keys is ordered else option.valid_keys|sort) %}
|
||||||
<div class="option-entry">
|
<div class="option-entry">
|
||||||
@@ -146,6 +147,7 @@
|
|||||||
|
|
||||||
{% macro LocationSet(option_name, option) %}
|
{% macro LocationSet(option_name, option) %}
|
||||||
{{ OptionTitle(option_name, option) }}
|
{{ OptionTitle(option_name, option) }}
|
||||||
|
<input type="hidden" id="{{ option_name }}-{{ key }}-hidden" name="{{ option_name }}" value="_ensure-empty-list"/>
|
||||||
<div class="option-container">
|
<div class="option-container">
|
||||||
{% for group_name in world.location_name_groups.keys()|sort %}
|
{% for group_name in world.location_name_groups.keys()|sort %}
|
||||||
{% if group_name != "Everywhere" %}
|
{% if group_name != "Everywhere" %}
|
||||||
@@ -169,6 +171,7 @@
|
|||||||
|
|
||||||
{% macro ItemSet(option_name, option) %}
|
{% macro ItemSet(option_name, option) %}
|
||||||
{{ OptionTitle(option_name, option) }}
|
{{ OptionTitle(option_name, option) }}
|
||||||
|
<input type="hidden" id="{{ option_name }}-{{ key }}-hidden" name="{{ option_name }}" value="_ensure-empty-list"/>
|
||||||
<div class="option-container">
|
<div class="option-container">
|
||||||
{% for group_name in world.item_name_groups.keys()|sort %}
|
{% for group_name in world.item_name_groups.keys()|sort %}
|
||||||
{% if group_name != "Everything" %}
|
{% if group_name != "Everything" %}
|
||||||
@@ -192,6 +195,7 @@
|
|||||||
|
|
||||||
{% macro OptionSet(option_name, option) %}
|
{% macro OptionSet(option_name, option) %}
|
||||||
{{ OptionTitle(option_name, option) }}
|
{{ OptionTitle(option_name, option) }}
|
||||||
|
<input type="hidden" id="{{ option_name }}-{{ key }}-hidden" name="{{ option_name }}" value="_ensure-empty-list"/>
|
||||||
<div class="option-container">
|
<div class="option-container">
|
||||||
{% for key in (option.valid_keys if option.valid_keys is ordered else option.valid_keys|sort) %}
|
{% for key in (option.valid_keys if option.valid_keys is ordered else option.valid_keys|sort) %}
|
||||||
<div class="option-entry">
|
<div class="option-entry">
|
||||||
|
|||||||
@@ -4,16 +4,20 @@
|
|||||||
|
|
||||||
{% block head %}
|
{% block head %}
|
||||||
<title>Generation failed, please retry.</title>
|
<title>Generation failed, please retry.</title>
|
||||||
<link rel="stylesheet" type="text/css" href="{{ url_for('static', filename="styles/waitSeed.css") }}"/>
|
<link rel="stylesheet" type="text/css" href="{{ url_for('static', filename='styles/waitSeed.css') }}"/>
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|
||||||
{% block body %}
|
{% block body %}
|
||||||
{% include 'header/oceanIslandHeader.html' %}
|
{% include 'header/oceanIslandHeader.html' %}
|
||||||
<div id="wait-seed-wrapper" class="grass-island">
|
<div id="wait-seed-wrapper" class="grass-island">
|
||||||
<div id="wait-seed">
|
<div id="wait-seed">
|
||||||
<h1>Generation failed</h1>
|
<h1>Generation Failed</h1>
|
||||||
<h2>please retry</h2>
|
<h2>Please try again!</h2>
|
||||||
{{ seed_error }}
|
<p>{{ seed_error }}</p>
|
||||||
|
<h4>More details:</h4>
|
||||||
|
<p>
|
||||||
|
<code class="grassy">{{ details }}</code>
|
||||||
|
</p>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
{% endblock %}
|
{% endblock %}
|
||||||
|
|||||||
@@ -11,32 +11,32 @@
|
|||||||
<h1>Site Map</h1>
|
<h1>Site Map</h1>
|
||||||
<h2>Base Pages</h2>
|
<h2>Base Pages</h2>
|
||||||
<ul>
|
<ul>
|
||||||
<li><a href="/discord">Discord Link</a></li>
|
<li><a href="{{ url_for('discord') }}">Discord Link</a></li>
|
||||||
<li><a href="/faq/en">F.A.Q. Page</a></li>
|
<li><a href="{{ url_for('faq', lang='en') }}">F.A.Q. Page</a></li>
|
||||||
<li><a href="/favicon.ico">Favicon</a></li>
|
<li><a href="{{ url_for('favicon') }}">Favicon</a></li>
|
||||||
<li><a href="/generate">Generate Game Page</a></li>
|
<li><a href="{{ url_for('generate') }}">Generate Game Page</a></li>
|
||||||
<li><a href="/">Homepage</a></li>
|
<li><a href="{{ url_for('landing') }}">Homepage</a></li>
|
||||||
<li><a href="/uploads">Host Game Page</a></li>
|
<li><a href="{{ url_for('uploads') }}">Host Game Page</a></li>
|
||||||
<li><a href="/datapackage">Raw Data Package</a></li>
|
<li><a href="{{ url_for('get_datapackage') }}">Raw Data Package</a></li>
|
||||||
<li><a href="{{ url_for('check')}}">Settings Validator</a></li>
|
<li><a href="{{ url_for('check') }}">Settings Validator</a></li>
|
||||||
<li><a href="/sitemap">Site Map</a></li>
|
<li><a href="{{ url_for('get_sitemap') }}">Site Map</a></li>
|
||||||
<li><a href="/start-playing">Start Playing</a></li>
|
<li><a href="{{ url_for('start_playing') }}">Start Playing</a></li>
|
||||||
<li><a href="/games">Supported Games Page</a></li>
|
<li><a href="{{ url_for('games') }}">Supported Games Page</a></li>
|
||||||
<li><a href="/tutorial">Tutorials Page</a></li>
|
<li><a href="{{ url_for('tutorial_landing') }}">Tutorials Page</a></li>
|
||||||
<li><a href="/user-content">User Content</a></li>
|
<li><a href="{{ url_for('user_content') }}">User Content</a></li>
|
||||||
<li><a href="{{url_for('stats')}}">Game Statistics</a></li>
|
<li><a href="{{ url_for('stats') }}">Game Statistics</a></li>
|
||||||
<li><a href="/glossary/en">Glossary</a></li>
|
<li><a href="{{ url_for('glossary', lang='en') }}">Glossary</a></li>
|
||||||
<li><a href="{{url_for("show_session")}}">Session / Login</a></li>
|
<li><a href="{{ url_for('show_session') }}">Session / Login</a></li>
|
||||||
</ul>
|
</ul>
|
||||||
|
|
||||||
<h2>Tutorials</h2>
|
<h2>Tutorials</h2>
|
||||||
<ul>
|
<ul>
|
||||||
<li><a href="/tutorial/Archipelago/setup/en">Multiworld Setup Tutorial</a></li>
|
<li><a href="{{ url_for('tutorial', game='Archipelago', file='setup_en') }}">Multiworld Setup Tutorial</a></li>
|
||||||
<li><a href="/tutorial/Archipelago/mac/en">Setup Guide for Mac</a></li>
|
<li><a href="{{ url_for('tutorial', game='Archipelago', file='mac_en') }}">Setup Guide for Mac</a></li>
|
||||||
<li><a href="/tutorial/Archipelago/commands/en">Server and Client Commands</a></li>
|
<li><a href="{{ url_for('tutorial', game='Archipelago', file='commands_en') }}">Server and Client Commands</a></li>
|
||||||
<li><a href="/tutorial/Archipelago/advanced_settings/en">Advanced YAML Guide</a></li>
|
<li><a href="{{ url_for('tutorial', game='Archipelago', file='advanced_settings_en') }}">Advanced YAML Guide</a></li>
|
||||||
<li><a href="/tutorial/Archipelago/triggers/en">Triggers Guide</a></li>
|
<li><a href="{{ url_for('tutorial', game='Archipelago', file='triggers_en') }}">Triggers Guide</a></li>
|
||||||
<li><a href="/tutorial/Archipelago/plando/en">Plando Guide</a></li>
|
<li><a href="{{ url_for('tutorial', game='Archipelago', file='plando_en') }}">Plando Guide</a></li>
|
||||||
</ul>
|
</ul>
|
||||||
|
|
||||||
<h2>Game Info Pages</h2>
|
<h2>Game Info Pages</h2>
|
||||||
|
|||||||
@@ -31,6 +31,9 @@
|
|||||||
{% include 'header/oceanHeader.html' %}
|
{% include 'header/oceanHeader.html' %}
|
||||||
<div id="games" class="markdown">
|
<div id="games" class="markdown">
|
||||||
<h1>Currently Supported Games</h1>
|
<h1>Currently Supported Games</h1>
|
||||||
|
<p>Below are the games that are currently included with the Archipelago software. To play a game that is not on
|
||||||
|
this page, please refer to the <a href="/tutorial/Archipelago/setup/en#playing-with-custom-worlds">playing with
|
||||||
|
custom worlds</a> section of the setup guide.</p>
|
||||||
<div class="js-only">
|
<div class="js-only">
|
||||||
<label for="game-search">Search for your game below!</label><br />
|
<label for="game-search">Search for your game below!</label><br />
|
||||||
<div class="page-controls">
|
<div class="page-controls">
|
||||||
|
|||||||
File diff suppressed because it is too large
@@ -139,6 +139,7 @@
|
|||||||
{% endmacro %}
|
{% endmacro %}
|
||||||
|
|
||||||
{% macro OptionList(option_name, option) %}
|
{% macro OptionList(option_name, option) %}
|
||||||
|
<input type="hidden" id="{{ option_name }}-{{ key }}-hidden" name="{{ option_name }}" value="_ensure-empty-list"/>
|
||||||
<div class="list-container">
|
<div class="list-container">
|
||||||
{% for key in (option.valid_keys if option.valid_keys is ordered else option.valid_keys|sort) %}
|
{% for key in (option.valid_keys if option.valid_keys is ordered else option.valid_keys|sort) %}
|
||||||
<div class="list-entry">
|
<div class="list-entry">
|
||||||
@@ -158,6 +159,7 @@
|
|||||||
{% endmacro %}
|
{% endmacro %}
|
||||||
|
|
||||||
{% macro LocationSet(option_name, option, world) %}
|
{% macro LocationSet(option_name, option, world) %}
|
||||||
|
<input type="hidden" id="{{ option_name }}-{{ key }}-hidden" name="{{ option_name }}" value="_ensure-empty-list"/>
|
||||||
<div class="set-container">
|
<div class="set-container">
|
||||||
{% for group_name in world.location_name_groups.keys()|sort %}
|
{% for group_name in world.location_name_groups.keys()|sort %}
|
||||||
{% if group_name != "Everywhere" %}
|
{% if group_name != "Everywhere" %}
|
||||||
@@ -180,6 +182,7 @@
|
|||||||
{% endmacro %}
|
{% endmacro %}
|
||||||
|
|
||||||
{% macro ItemSet(option_name, option, world) %}
|
{% macro ItemSet(option_name, option, world) %}
|
||||||
|
<input type="hidden" id="{{ option_name }}-{{ key }}-hidden" name="{{ option_name }}" value="_ensure-empty-list"/>
|
||||||
<div class="set-container">
|
<div class="set-container">
|
||||||
{% for group_name in world.item_name_groups.keys()|sort %}
|
{% for group_name in world.item_name_groups.keys()|sort %}
|
||||||
{% if group_name != "Everything" %}
|
{% if group_name != "Everything" %}
|
||||||
@@ -202,6 +205,7 @@
|
|||||||
{% endmacro %}
|
{% endmacro %}
|
||||||
|
|
||||||
{% macro OptionSet(option_name, option) %}
|
{% macro OptionSet(option_name, option) %}
|
||||||
|
<input type="hidden" id="{{ option_name }}-{{ key }}-hidden" name="{{ option_name }}" value="_ensure-empty-list"/>
|
||||||
<div class="set-container">
|
<div class="set-container">
|
||||||
{% for key in (option.valid_keys if option.valid_keys is ordered else option.valid_keys|sort) %}
|
{% for key in (option.valid_keys if option.valid_keys is ordered else option.valid_keys|sort) %}
|
||||||
<div class="set-entry">
|
<div class="set-entry">
|
||||||
|
|||||||
File diff suppressed because it is too large
@@ -20,6 +20,8 @@ from worlds.tloz.Items import item_game_ids
|
|||||||
from worlds.tloz.Locations import location_ids
|
from worlds.tloz.Locations import location_ids
|
||||||
from worlds.tloz import Items, Locations, Rom
|
from worlds.tloz import Items, Locations, Rom
|
||||||
|
|
||||||
|
from settings import get_settings
|
||||||
|
|
||||||
SYSTEM_MESSAGE_ID = 0
|
SYSTEM_MESSAGE_ID = 0
|
||||||
|
|
||||||
CONNECTION_TIMING_OUT_STATUS = "Connection timing out. Please restart your emulator, then restart connector_tloz.lua"
|
CONNECTION_TIMING_OUT_STATUS = "Connection timing out. Please restart your emulator, then restart connector_tloz.lua"
|
||||||
@@ -341,13 +343,12 @@ if __name__ == '__main__':
|
|||||||
# Text Mode to use !hint and such with games that have no text entry
|
# Text Mode to use !hint and such with games that have no text entry
|
||||||
Utils.init_logging("ZeldaClient")
|
Utils.init_logging("ZeldaClient")
|
||||||
|
|
||||||
options = Utils.get_options()
|
DISPLAY_MSGS = get_settings()["tloz_options"]["display_msgs"]
|
||||||
DISPLAY_MSGS = options["tloz_options"]["display_msgs"]
|
|
||||||
|
|
||||||
|
|
||||||
async def run_game(romfile: str) -> None:
|
async def run_game(romfile: str) -> None:
|
||||||
auto_start = typing.cast(typing.Union[bool, str],
|
auto_start = typing.cast(typing.Union[bool, str],
|
||||||
Utils.get_options()["tloz_options"].get("rom_start", True))
|
get_settings()["tloz_options"].get("rom_start", True))
|
||||||
if auto_start is True:
|
if auto_start is True:
|
||||||
import webbrowser
|
import webbrowser
|
||||||
webbrowser.open(romfile)
|
webbrowser.open(romfile)
|
||||||
|
|||||||
@@ -220,6 +220,8 @@
|
|||||||
<MessageBoxLabel>:
|
<MessageBoxLabel>:
|
||||||
theme_text_color: "Custom"
|
theme_text_color: "Custom"
|
||||||
text_color: 1, 1, 1, 1
|
text_color: 1, 1, 1, 1
|
||||||
|
<MessageBox>:
|
||||||
|
height: self.content.texture_size[1] + 80
|
||||||
<ScrollBox>:
|
<ScrollBox>:
|
||||||
layout: layout
|
layout: layout
|
||||||
bar_width: "12dp"
|
bar_width: "12dp"
|
||||||
@@ -233,8 +235,3 @@
|
|||||||
spacing: 10
|
spacing: 10
|
||||||
size_hint_y: None
|
size_hint_y: None
|
||||||
height: self.minimum_height
|
height: self.minimum_height
|
||||||
<MessageBoxLabel>:
|
|
||||||
valign: "middle"
|
|
||||||
halign: "center"
|
|
||||||
text_size: self.width, None
|
|
||||||
height: self.texture_size[1]
|
|
||||||
|
|||||||
@@ -33,6 +33,10 @@ description: {{ yaml_dump("Default %s Template" % game) }}
|
|||||||
game: {{ yaml_dump(game) }}
|
game: {{ yaml_dump(game) }}
|
||||||
requires:
|
requires:
|
||||||
version: {{ __version__ }} # Version of Archipelago required for this yaml to work as expected.
|
version: {{ __version__ }} # Version of Archipelago required for this yaml to work as expected.
|
||||||
|
{%- if world_version != "0.0.0" %}
|
||||||
|
game:
|
||||||
|
{{ yaml_dump(game) }}: {{ world_version }} # Version of the world required for this yaml to work as expected.
|
||||||
|
{%- endif %}
|
||||||
|
|
||||||
{%- macro range_option(option) %}
|
{%- macro range_option(option) %}
|
||||||
# You can define additional values between the minimum and maximum values.
|
# You can define additional values between the minimum and maximum values.
|
||||||
|
|||||||
@@ -1,7 +0,0 @@
|
|||||||
author: Nintendo
|
|
||||||
data: null
|
|
||||||
game: A Link to the Past
|
|
||||||
min_format_version: 1
|
|
||||||
name: Link
|
|
||||||
format_version: 1
|
|
||||||
sprite_version: 1
|
|
||||||
2
data/sprites/remote/.gitignore
vendored
@@ -1,2 +0,0 @@
|
|||||||
*
|
|
||||||
!.gitignore
|
|
||||||
@@ -21,9 +21,6 @@
|
|||||||
# Aquaria
|
# Aquaria
|
||||||
/worlds/aquaria/ @tioui
|
/worlds/aquaria/ @tioui
|
||||||
|
|
||||||
# ArchipIDLE
|
|
||||||
/worlds/archipidle/ @LegendaryLinux
|
|
||||||
|
|
||||||
# Blasphemous
|
# Blasphemous
|
||||||
/worlds/blasphemous/ @TRPG0
|
/worlds/blasphemous/ @TRPG0
|
||||||
|
|
||||||
@@ -42,9 +39,15 @@
|
|||||||
# Celeste 64
|
# Celeste 64
|
||||||
/worlds/celeste64/ @PoryGone
|
/worlds/celeste64/ @PoryGone
|
||||||
|
|
||||||
|
# Celeste (Open World)
|
||||||
|
/worlds/celeste_open_world/ @PoryGone
|
||||||
|
|
||||||
# ChecksFinder
|
# ChecksFinder
|
||||||
/worlds/checksfinder/ @SunCatMC
|
/worlds/checksfinder/ @SunCatMC
|
||||||
|
|
||||||
|
# Choo-Choo Charles
|
||||||
|
/worlds/cccharles/ @Yaranorgoth
|
||||||
|
|
||||||
# Civilization VI
|
# Civilization VI
|
||||||
/worlds/civ6/ @hesto2
|
/worlds/civ6/ @hesto2
|
||||||
|
|
||||||
@@ -69,6 +72,9 @@
|
|||||||
# Faxanadu
|
# Faxanadu
|
||||||
/worlds/faxanadu/ @Daivuk
|
/worlds/faxanadu/ @Daivuk
|
||||||
|
|
||||||
|
# Final Fantasy (1)
|
||||||
|
/worlds/ff1/ @Rosalie-A
|
||||||
|
|
||||||
# Final Fantasy Mystic Quest
|
# Final Fantasy Mystic Quest
|
||||||
/worlds/ffmq/ @Alchav @wildham0
|
/worlds/ffmq/ @Alchav @wildham0
|
||||||
|
|
||||||
@@ -238,9 +244,6 @@
|
|||||||
# compatibility, these worlds may be deleted. If you are interested in stepping up as maintainer for
|
# compatibility, these worlds may be deleted. If you are interested in stepping up as maintainer for
|
||||||
# any of these worlds, please review `/docs/world maintainer.md` documentation.
|
# any of these worlds, please review `/docs/world maintainer.md` documentation.
|
||||||
|
|
||||||
# Final Fantasy (1)
|
|
||||||
# /worlds/ff1/
|
|
||||||
|
|
||||||
# Ocarina of Time
|
# Ocarina of Time
|
||||||
# /worlds/oot/
|
# /worlds/oot/
|
||||||
|
|
||||||
|
|||||||
@@ -62,6 +62,24 @@ if possible.
|
|||||||
* If your client appears in the Archipelago Launcher, you may define an icon for it that differentiates it from
|
* If your client appears in the Archipelago Launcher, you may define an icon for it that differentiates it from
|
||||||
other clients. The icon size is 48x48 pixels, but smaller or larger images will scale to that size.
|
other clients. The icon size is 48x48 pixels, but smaller or larger images will scale to that size.
|
||||||
|
|
||||||
|
### Launcher Integration
|
||||||
|
|
||||||
|
If you have a python client or want to utilize the integration features of the Archipelago Launcher (ex. Slot links in
|
||||||
|
webhost) you can define a Component to be a part of the Launcher. `LauncherComponents.components` can be appended to
|
||||||
|
with additional Components in order to automatically add them to the Launcher. Most Components only need a
|
||||||
|
`display_name` and `func`, but `supports_uri` and `game_name` can be defined to support launching by webhost links,
|
||||||
|
`icon` and `description` can be used to customize display in the Launcher UI, and `file_identifier` can be used to
|
||||||
|
launch by file.
|
||||||
|
|
||||||
|
Additionally, if you use `func` you have access to LauncherComponent.launch or launch_subprocess to run your
|
||||||
|
function as a subprocess that can be utilized side by side with other clients.
|
||||||
|
```py
|
||||||
|
def my_func(*args: str):
|
||||||
|
from .client import run_client
|
||||||
|
LauncherComponent.launch(run_client, name="My Client", args=args)
|
||||||
|
```
|
||||||
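The snippet above only defines the function; for the Launcher to pick it up, a `Component` still has to be appended to `LauncherComponents.components`. A rough sketch follows; the display name, description, and component type here are illustrative assumptions, not values from this commit.

```py
# Hedged sketch: registering my_func from the example above as a Launcher component.
from worlds.LauncherComponents import Component, Type, components

components.append(
    Component("My Client", func=my_func, component_type=Type.CLIENT,
              description="Connects My Game to Archipelago")  # placeholder strings
)
```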
|
|
||||||
|
|
||||||
## World
|
## World
|
||||||
|
|
||||||
The world is your game integration for the Archipelago generator, webhost, and multiworld server. It contains all the
|
The world is your game integration for the Archipelago generator, webhost, and multiworld server. It contains all the
|
||||||
|
|||||||
@@ -1,26 +1,83 @@
|
|||||||
# apworld Specification
|
# APWorld Specification
|
||||||
|
|
||||||
Archipelago depends on worlds to provide game-specific details like items, locations and output generation.
|
Archipelago depends on worlds to provide game-specific details like items, locations and output generation.
|
||||||
Those are located in the `worlds/` folder (source) or `<install dir>/lib/worlds/` (when installed).
|
These are called "APWorlds".
|
||||||
|
They are located in the `worlds/` folder (source) or `<install dir>/lib/worlds/` (when installed).
|
||||||
See [world api.md](world%20api.md) for details.
|
See [world api.md](world%20api.md) for details.
|
||||||
|
APWorlds can either be a folder, or they can be packaged as an .apworld file.
|
||||||
|
|
||||||
apworld provides a way to package and ship a world that is not part of the main distribution by placing a `*.apworld`
|
## .apworld File Format
|
||||||
file into the worlds folder.
|
|
||||||
|
|
||||||
**Warning:** apworlds have to be all lower case, otherwise they raise a bogus Exception when trying to import in frozen python 3.10+!
|
The `.apworld` file format provides a way to package and ship an APWorld that is not part of the main distribution
|
||||||
|
by placing a `*.apworld` file into the worlds folder.
|
||||||
|
|
||||||
|
`.apworld` files are zip archives, all lower case, with the file ending `.apworld`.
|
||||||
## File Format
|
|
||||||
|
|
||||||
apworld files are zip archives, all lower case, with the file ending `.apworld`.
|
|
||||||
The zip has to contain a folder with the same name as the zip, case-sensitive, that contains what would normally be in
|
The zip has to contain a folder with the same name as the zip, case-sensitive, that contains what would normally be in
|
||||||
the world's folder in `worlds/`. I.e. `worlds/ror2.apworld` containing `ror2/__init__.py`.
|
the world's folder in `worlds/`. I.e. `worlds/ror2.apworld` containing `ror2/__init__.py`.
|
||||||
|
|
||||||
|
**Warning:** `.apworld` files have to be all lower case,
|
||||||
|
otherwise they raise a bogus Exception when trying to import in frozen python 3.10+!
|
||||||
|
|
||||||
## Metadata
|
## Metadata
|
||||||
|
|
||||||
No metadata is specified yet.
|
Metadata about the APWorld is defined in an `archipelago.json` file.
|
||||||
|
|
||||||
|
If the APWorld is a folder, the only required field is "game":
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"game": "Game Name"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
There are also the following optional fields:
|
||||||
|
* `minimum_ap_version` and `maximum_ap_version` - which if present will each be compared against the current
|
||||||
|
Archipelago version respectively to filter those files from being loaded.
|
||||||
|
* `world_version` - an arbitrary version for that world in order to only load the newest valid world.
|
||||||
|
An APWorld without a world_version is always treated as older than one with a version
|
||||||
|
(**Must** use exactly the format `"major.minor.build"`, e.g. `1.0.0`; see the sketch after this list)
|
||||||
|
* `authors` - a list of authors, to eventually be displayed in various user-facing places such as WebHost and
|
||||||
|
package managers. Should always be a list of strings.
|
||||||
|
|
||||||
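As an aside on the `world_version` format noted above: the strict `"major.minor.build"` shape matters because version strings only order correctly once split into integer tuples. The helper below is an illustration of that point, not the loader's actual code.

```python
# Illustration only: dotted version strings compare correctly as integer tuples,
# while plain string comparison would order "2.10.0" before "2.9.3".
def parse_world_version(version: str) -> tuple[int, int, int]:
    major, minor, build = (int(part) for part in version.split("."))
    return major, minor, build


assert parse_world_version("2.10.0") > parse_world_version("2.9.3")
assert "2.10.0" < "2.9.3"  # lexicographic comparison gets this wrong
```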
|
If the APWorld is packaged as an `.apworld` zip file, it also needs to have `version` and `compatible_version`,
|
||||||
|
which refer to the version of the APContainer packaging scheme defined in [Files.py](../worlds/Files.py).
|
||||||
|
These get automatically added to the `archipelago.json` of an .apworld if it is packaged using the
|
||||||
|
["Build apworlds" launcher component](#build-apworlds-launcher-component),
|
||||||
|
which is the correct way to package your `.apworld` as a world developer. Do not write these fields yourself.
|
||||||
|
|
||||||
|
### "Build apworlds" Launcher Component
|
||||||
|
|
||||||
|
In the Archipelago Launcher, there is a "Build apworlds" component that will package all world folders to `.apworld`,
|
||||||
|
and add `archipelago.json` manifest files to them.
|
||||||
|
These .apworld files will be output to `build/apworlds` (relative to the Archipelago root directory).
|
||||||
|
The `archipelago.json` file in each .apworld will automatically include the appropriate
|
||||||
|
`version` and `compatible_version`.
|
||||||
|
|
||||||
|
If a world folder has an `archipelago.json` in its root, any fields it contains will be carried over.
|
||||||
|
So, a world folder with an `archipelago.json` that looks like this:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"game": "Game Name",
|
||||||
|
"minimum_ap_version": "0.6.4",
|
||||||
|
"world_version": "2.1.4",
|
||||||
|
"authors": ["NewSoupVi"]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
will be packaged into an `.apworld` with a manifest file inside of it that looks like this:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"minimum_ap_version": "0.6.4",
|
||||||
|
"world_version": "2.1.4",
|
||||||
|
"authors": ["NewSoupVi"],
|
||||||
|
"version": 7,
|
||||||
|
"compatible_version": 7,
|
||||||
|
"game": "Game Name"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
This is the recommended workflow for packaging your world to an `.apworld`.
|
||||||
|
|
||||||
## Extra Data
|
## Extra Data
|
||||||
|
|
||||||
@@ -29,7 +86,7 @@ The zip can contain arbitrary files in addition what was specified above.
|
|||||||
|
|
||||||
## Caveats
|
## Caveats
|
||||||
|
|
||||||
Imports from other files inside the apworld have to use relative imports. e.g. `from .options import MyGameOptions`
|
Imports from other files inside the APWorld have to use relative imports. e.g. `from .options import MyGameOptions`
|
||||||
|
|
||||||
Imports from AP base have to use absolute imports, e.g. `from Options import Toggle` or
|
Imports from AP base have to use absolute imports, e.g. `from Options import Toggle` or
|
||||||
`from worlds.AutoWorld import World`
|
`from worlds.AutoWorld import World`
|
||||||
|
|||||||
@@ -16,7 +16,7 @@ game contributions:
|
|||||||
* **Do not introduce unit test failures/regressions.**
|
* **Do not introduce unit test failures/regressions.**
|
||||||
Archipelago supports multiple versions of Python. You may need to download older Python versions to fully test
|
Archipelago supports multiple versions of Python. You may need to download older Python versions to fully test
|
||||||
your changes. Currently, the oldest supported version
|
your changes. Currently, the oldest supported version
|
||||||
is [Python 3.10](https://www.python.org/downloads/release/python-31015/).
|
is [Python 3.11](https://www.python.org/downloads/release/python-31113/).
|
||||||
It is recommended that automated github actions are turned on in your fork to have github run unit tests after
|
It is recommended that automated github actions are turned on in your fork to have github run unit tests after
|
||||||
pushing.
|
pushing.
|
||||||
You can turn them on here:
|
You can turn them on here:
|
||||||
|
|||||||
@@ -352,14 +352,14 @@ direction_matching_group_lookup = {
|
|||||||
|
|
||||||
Terrain matching or dungeon shuffle:
|
Terrain matching or dungeon shuffle:
|
||||||
```python
|
```python
|
||||||
def randomize_within_same_group(group: int) -> List[int]:
|
def randomize_within_same_group(group: int) -> list[int]:
|
||||||
return [group]
|
return [group]
|
||||||
identity_group_lookup = bake_target_group_lookup(world, randomize_within_same_group)
|
identity_group_lookup = bake_target_group_lookup(world, randomize_within_same_group)
|
||||||
```
|
```
|
||||||
|
|
||||||
Directional + area shuffle:
|
Directional + area shuffle:
|
||||||
```python
|
```python
|
||||||
def get_target_groups(group: int) -> List[int]:
|
def get_target_groups(group: int) -> list[int]:
|
||||||
# example group: LEFT | CAVE
|
# example group: LEFT | CAVE
|
||||||
# example result: [RIGHT | CAVE, DOOR | CAVE]
|
# example result: [RIGHT | CAVE, DOOR | CAVE]
|
||||||
direction = group & Groups.DIRECTION_MASK
|
direction = group & Groups.DIRECTION_MASK
|
||||||
|
|||||||
@@ -79,7 +79,7 @@ Sent to clients when they connect to an Archipelago server.
|
|||||||
| generator_version | [NetworkVersion](#NetworkVersion) | Object denoting the version of Archipelago which generated the multiworld. |
|
| generator_version | [NetworkVersion](#NetworkVersion) | Object denoting the version of Archipelago which generated the multiworld. |
|
||||||
| tags | list\[str\] | Denotes special features or capabilities that the sender is capable of. Example: `WebHost` |
|
| tags | list\[str\] | Denotes special features or capabilities that the sender is capable of. Example: `WebHost` |
|
||||||
| password | bool | Denotes whether a password is required to join this room. |
|
| password | bool | Denotes whether a password is required to join this room. |
|
||||||
| permissions | dict\[str, [Permission](#Permission)\[int\]\] | Mapping of permission name to [Permission](#Permission), keys are: "release", "collect" and "remaining". |
|
| permissions | dict\[str, [Permission](#Permission)\] | Mapping of permission name to [Permission](#Permission), keys are: "release", "collect" and "remaining". |
|
||||||
| hint_cost | int | The percentage of total locations that need to be checked to receive a hint from the server. |
|
| hint_cost | int | The percentage of total locations that need to be checked to receive a hint from the server. |
|
||||||
| location_check_points | int | The amount of hint points you receive per item/location check completed. |
|
| location_check_points | int | The amount of hint points you receive per item/location check completed. |
|
||||||
| games | list\[str\] | List of games present in this multiworld. |
|
| games | list\[str\] | List of games present in this multiworld. |
|
||||||
@@ -662,13 +662,14 @@ class SlotType(enum.IntFlag):
|
|||||||
An object representing static information about a slot.
|
An object representing static information about a slot.
|
||||||
|
|
||||||
```python
|
```python
|
||||||
import typing
|
from collections.abc import Sequence
|
||||||
|
from typing import NamedTuple
|
||||||
from NetUtils import SlotType
|
from NetUtils import SlotType
|
||||||
class NetworkSlot(typing.NamedTuple):
|
class NetworkSlot(NamedTuple):
|
||||||
name: str
|
name: str
|
||||||
game: str
|
game: str
|
||||||
type: SlotType
|
type: SlotType
|
||||||
group_members: typing.List[int] = [] # only populated if type == group
|
group_members: Sequence[int] = [] # only populated if type == group
|
||||||
```
|
```
|
||||||
|
|
||||||
### Permission
|
### Permission
|
||||||
@@ -686,8 +687,8 @@ class Permission(enum.IntEnum):
|
|||||||
### Hint
|
### Hint
|
||||||
An object representing a Hint.
|
An object representing a Hint.
|
||||||
```python
|
```python
|
||||||
import typing
|
from typing import NamedTuple
|
||||||
class Hint(typing.NamedTuple):
|
class Hint(NamedTuple):
|
||||||
receiving_player: int
|
receiving_player: int
|
||||||
finding_player: int
|
finding_player: int
|
||||||
location: int
|
location: int
|
||||||
|
|||||||
@@ -7,10 +7,10 @@ use that version. These steps are for developers or platforms without compiled r
|
|||||||
## General
|
## General
|
||||||
|
|
||||||
What you'll need:
|
What you'll need:
|
||||||
* [Python 3.10.11 or newer](https://www.python.org/downloads/), not the Windows Store version
|
* [Python 3.11.9 or newer](https://www.python.org/downloads/), not the Windows Store version
|
||||||
* On Windows, please consider only using the latest supported version in production environments since security
|
* On Windows, please consider only using the latest supported version in production environments since security
|
||||||
updates for older versions are not easily available.
|
updates for older versions are not easily available.
|
||||||
* Python 3.12.x is currently the newest supported version
|
* Python 3.13.x is currently the newest supported version
|
||||||
* pip: included in downloads from python.org, separate in many Linux distributions
|
* pip: included in downloads from python.org, separate in many Linux distributions
|
||||||
* Matching C compiler
|
* Matching C compiler
|
||||||
* possibly optional, read operating system specific sections
|
* possibly optional, read operating system specific sections
|
||||||
|
|||||||
@@ -28,7 +28,7 @@ if it does not exist.
|
|||||||
## Global Settings
|
## Global Settings
|
||||||
|
|
||||||
All non-world-specific settings are defined directly in settings.py.
|
All non-world-specific settings are defined directly in settings.py.
|
||||||
Each value needs to have a default. If the default should be `None`, define it as `typing.Optional` and assign `None`.
|
Each value needs to have a default. If the default should be `None`, annotate it using `T | None = None`.
|
||||||
|
|
||||||
To access a "global" config value, with correct typing, use one of
|
To access a "global" config value, with correct typing, use one of
|
||||||
```python
|
```python
|
||||||
|
|||||||
18
docs/shared_cache.md
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
# Shared Cache
|
||||||
|
|
||||||
|
Archipelago maintains a shared folder of information that can be persisted for a machine and reused across Libraries.
|
||||||
|
It can be found at the User Cache Directory for appname `Archipelago` in the `Cache` subfolder
|
||||||
|
(ex. `%LOCALAPPDATA%/Archipelago/Cache`).
|
||||||
|
|
||||||
|
## Common Cache
|
||||||
|
|
||||||
|
The Common Cache `common.json` can be used to store any generic data that is expected to be shared across programs
|
||||||
|
for the same User.
|
||||||
|
|
||||||
|
* `uuid`: A UUID identifier used to identify clients as coming from the same user/machine, sent in the Connect packet
|
||||||
|
|
||||||
|
## Data Package Cache
|
||||||
|
|
||||||
|
The `datapackage` folder in the shared cache folder is used to store datapackages by game and checksum to be reused
|
||||||
|
in order to save network traffic. The expected structure is `datapackage/Game Name/checksum_value.json` with the
|
||||||
|
contents of each json file being the no-whitespace datapackage contents.
|
||||||
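A minimal sketch of reading from this cache layout. The paths follow the structure described above; the Windows-style base path and the helper functions themselves are assumptions for illustration, not an Archipelago API.

```python
# Hedged sketch: resolving the shared cache described above.
# The base path assumes Windows; other platforms use their own user cache directory.
import json
import os
from pathlib import Path

CACHE_DIR = Path(os.path.expandvars(r"%LOCALAPPDATA%")) / "Archipelago" / "Cache"


def machine_uuid() -> str | None:
    """Read the per-machine uuid from the Common Cache, if it exists yet."""
    common = CACHE_DIR / "common.json"
    return json.loads(common.read_text(encoding="utf-8")).get("uuid") if common.is_file() else None


def load_cached_datapackage(game: str, checksum: str) -> dict | None:
    """Return a cached datapackage for (game, checksum), or None if not cached."""
    path = CACHE_DIR / "datapackage" / game / f"{checksum}.json"
    if path.is_file():
        return json.loads(path.read_text(encoding="utf-8"))
    return None  # not cached yet: fetch it from the server and store it here
```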
@@ -15,8 +15,10 @@
|
|||||||
* Prefer [format string literals](https://peps.python.org/pep-0498/) over string concatenation,
|
* Prefer [format string literals](https://peps.python.org/pep-0498/) over string concatenation,
|
||||||
use single quotes inside them: `f"Like {dct['key']}"`
|
use single quotes inside them: `f"Like {dct['key']}"`
|
||||||
* Use type annotations where possible for function signatures and class members.
|
* Use type annotations where possible for function signatures and class members.
|
||||||
* Use type annotations where appropriate for local variables (e.g. `var: List[int] = []`, or when the
|
* Use type annotations where appropriate for local variables (e.g. `var: list[int] = []`, or when the
|
||||||
type is hard or impossible to deduce.) Clear annotations help developers look up and validate API calls.
|
type is hard or impossible to deduce). Clear annotations help developers look up and validate API calls.
|
||||||
|
* Prefer new style type annotations for new code (e.g. `var: dict[str, str | int]` over
|
||||||
|
`var: Dict[str, Union[str, int]]`).
|
||||||
* If a line ends with an open bracket/brace/parentheses, the matching closing bracket should be at the
|
* If a line ends with an open bracket/brace/parentheses, the matching closing bracket should be at the
|
||||||
beginning of a line at the same indentation as the beginning of the line with the open bracket.
|
beginning of a line at the same indentation as the beginning of the line with the open bracket.
|
||||||
```python
|
```python
|
||||||
@@ -60,3 +62,9 @@
|
|||||||
* Indent `case` inside `switch ` with 2 spaces.
|
* Indent `case` inside `switch ` with 2 spaces.
|
||||||
* Use single quotes.
|
* Use single quotes.
|
||||||
* Semicolons are required after every statement.
|
* Semicolons are required after every statement.
|
||||||
|
|
||||||
|
## KV
|
||||||
|
|
||||||
|
* Style should be defined in `.kv` as much as possible, only Python when unavailable.
|
||||||
|
* Should follow [our Python style](#python-code) where appropriate (quotation marks, indentation).
|
||||||
|
* When escaping a line break, add a space between code and backslash.
|
||||||
|
|||||||
@@ -82,10 +82,10 @@ overridden. For more information on what methods are available to your class, ch
|
|||||||
|
|
||||||
#### Alternatives to WorldTestBase
|
#### Alternatives to WorldTestBase
|
||||||
|
|
||||||
Unit tests can also be created using [TestBase](/test/bases.py#L16) or
|
Unit tests can also be created using
|
||||||
[unittest.TestCase](https://docs.python.org/3/library/unittest.html#unittest.TestCase) depending on your use case. These
|
[unittest.TestCase](https://docs.python.org/3/library/unittest.html#unittest.TestCase) directly. These may be useful
|
||||||
may be useful for generating a multiworld under very specific constraints without using the generic world setup, or for
|
for generating a multiworld under very specific constraints without using the generic world setup, or for testing
|
||||||
testing portions of your code that can be tested without relying on a multiworld to be created first.
|
portions of your code that can be tested without relying on a multiworld to be created first.
|
||||||
|
|
||||||
#### Parametrization
|
#### Parametrization
|
||||||
|
|
||||||
@@ -102,8 +102,7 @@ for multiple inputs) the base test. Some important things to consider when attem
|
|||||||
|
|
||||||
* Classes inheriting from `WorldTestBase`, including those created by the helpers in `test.param`, will run all
|
* Classes inheriting from `WorldTestBase`, including those created by the helpers in `test.param`, will run all
|
||||||
base tests by default, make sure the produced tests actually do what you aim for and do not waste a lot of
|
base tests by default, make sure the produced tests actually do what you aim for and do not waste a lot of
|
||||||
extra CPU time. Consider using `TestBase` or `unittest.TestCase` directly
|
extra CPU time. Consider using `unittest.TestCase` directly or setting `WorldTestBase.run_default_tests` to False.
|
||||||
or setting `WorldTestBase.run_default_tests` to False.
|
|
||||||
|
|
||||||
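Tying the bullet above together, a minimal sketch of a `WorldTestBase` subclass that opts out of the default battery and only runs one targeted test; the game name, option, and location/item names are hypothetical.

```python
# A minimal sketch, not tied to any real world: skip the generic default tests
# and only run a custom access check.
from test.bases import WorldTestBase


class TestBossAccess(WorldTestBase):
    game = "My Game"                  # hypothetical game name
    options = {"difficulty": "hard"}  # hypothetical option
    run_default_tests = False         # avoid re-running the whole default battery

    def test_boss_requires_sword(self) -> None:
        self.assertFalse(self.can_reach_location("Boss Arena"))
        self.collect_by_name("Sword")
        self.assertTrue(self.can_reach_location("Boss Arena"))
```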
#### Performance Considerations
|
#### Performance Considerations
|
||||||
|
|
||||||
|
|||||||
@@ -16,6 +16,10 @@ Current endpoints:
|
|||||||
- [`/status/<suuid:seed>`](#status)
|
- [`/status/<suuid:seed>`](#status)
|
||||||
- Room API
|
- Room API
|
||||||
- [`/room_status/<suuid:room_id>`](#roomstatus)
|
- [`/room_status/<suuid:room_id>`](#roomstatus)
|
||||||
|
- Tracker API
|
||||||
|
- [`/tracker/<suuid:tracker>`](#tracker)
|
||||||
|
- [`/static_tracker/<suuid:tracker>`](#statictracker)
|
||||||
|
- [`/slot_data_tracker/<suuid:tracker>`](#slotdatatracker)
|
||||||
- User API
|
- User API
|
||||||
- [`/get_rooms`](#getrooms)
|
- [`/get_rooms`](#getrooms)
|
||||||
- [`/get_seeds`](#getseeds)
|
- [`/get_seeds`](#getseeds)
|
||||||
@@ -244,6 +248,212 @@ Example:
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Tracker Endpoints
|
||||||
|
Endpoints to fetch information regarding players of an active WebHost room with the supplied tracker ID. The tracker ID
|
||||||
|
can either be viewed while on a room tracker page, or from the [room's endpoint](#room-endpoints).
|
||||||
|
|
||||||
|
### `/tracker/<suuid:tracker>`
|
||||||
|
<a name=tracker></a>
|
||||||
|
Will provide a dict of tracker data with the following keys:
|
||||||
|
|
||||||
|
- Each player's current alias (`aliases`)
|
||||||
|
- Will return the name if there is none
|
||||||
|
- A list of items each player has received as a NetworkItem (`player_items_received`)
|
||||||
|
- A list of checks done by each player as a list of the location id's (`player_checks_done`)
|
||||||
|
- The total number of checks done by all players (`total_checks_done`)
|
||||||
|
- Hints that players have used or received (`hints`)
|
||||||
|
- The time of last activity of each player in RFC 1123 format (`activity_timers`)
|
||||||
|
- The time of last active connection of each player in RFC 1123 format (`connection_timers`)
|
||||||
|
- The current client status of each player (`player_status`)
|
||||||
|
|
||||||
|
Example:
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"aliases": [
|
||||||
|
{
|
||||||
|
"team": 0,
|
||||||
|
"player": 1,
|
||||||
|
"alias": "Incompetence"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"team": 0,
|
||||||
|
"player": 2,
|
||||||
|
"alias": "Slot_Name_2"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"player_items_received": [
|
||||||
|
{
|
||||||
|
"team": 0,
|
||||||
|
"player": 1,
|
||||||
|
"items": [
|
||||||
|
[1, 1, 1, 0],
|
||||||
|
[2, 2, 2, 1]
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"team": 0,
|
||||||
|
"player": 2,
|
||||||
|
"items": [
|
||||||
|
[1, 1, 1, 2],
|
||||||
|
[2, 2, 2, 0]
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"player_checks_done": [
|
||||||
|
{
|
||||||
|
"team": 0,
|
||||||
|
"player": 1,
|
||||||
|
"locations": [
|
||||||
|
1,
|
||||||
|
2
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"team": 0,
|
||||||
|
"player": 2,
|
||||||
|
"locations": [
|
||||||
|
1,
|
||||||
|
2
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"total_checks_done": [
|
||||||
|
{
|
||||||
|
"team": 0,
|
||||||
|
"checks_done": 4
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"hints": [
|
||||||
|
{
|
||||||
|
"team": 0,
|
||||||
|
"player": 1,
|
||||||
|
"hints": [
|
||||||
|
[1, 2, 4, 6, 0, "", 4, 0]
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"team": 0,
|
||||||
|
"player": 2,
|
||||||
|
"hints": []
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"activity_timers": [
|
||||||
|
{
|
||||||
|
"team": 0,
|
||||||
|
"player": 1,
|
||||||
|
"time": "Fri, 18 Apr 2025 20:35:45 GMT"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"team": 0,
|
||||||
|
"player": 2,
|
||||||
|
"time": "Fri, 18 Apr 2025 20:42:46 GMT"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"connection_timers": [
|
||||||
|
{
|
||||||
|
"team": 0,
|
||||||
|
"player": 1,
|
||||||
|
"time": "Fri, 18 Apr 2025 20:38:25 GMT"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"team": 0,
|
||||||
|
"player": 2,
|
||||||
|
"time": "Fri, 18 Apr 2025 21:03:00 GMT"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"player_status": [
|
||||||
|
{
|
||||||
|
"team": 0,
|
||||||
|
"player": 1,
|
||||||
|
"status": 0
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"team": 0,
|
||||||
|
"player": 2,
|
||||||
|
"status": 0
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### `/static_tracker/<suuid:tracker>`
|
||||||
|
<a name=statictracker></a>
|
||||||
|
Will provide a dict of static tracker data with the following keys:
|
||||||
|
|
||||||
|
- item_link groups and their players (`groups`)
|
||||||
|
- The datapackage hash for each game (`datapackage`)
|
||||||
|
- This hash can then be sent to the datapackage API to receive the appropriate datapackage as necessary
|
||||||
|
- The number of checks found vs. total checks available per player (`player_locations_total`)
|
||||||
|
- Uses the same logic as the multitracker template: the found count is `len(player_checks_done.locations)` and the total is `player_locations_total.total_locations` (all available checks).
|
||||||
|
|
||||||
|
Example:
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"groups": [
|
||||||
|
{
|
||||||
|
"slot": 5,
|
||||||
|
"name": "testGroup",
|
||||||
|
"members": [
|
||||||
|
1,
|
||||||
|
2
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"slot": 6,
|
||||||
|
"name": "myCoolLink",
|
||||||
|
"members": [
|
||||||
|
3,
|
||||||
|
4
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"datapackage": {
|
||||||
|
"Archipelago": {
|
||||||
|
"checksum": "ac9141e9ad0318df2fa27da5f20c50a842afeecb",
|
||||||
|
},
|
||||||
|
"The Messenger": {
|
||||||
|
"checksum": "6991cbcda7316b65bcb072667f3ee4c4cae71c0b",
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"player_locations_total": [
|
||||||
|
{
|
||||||
|
"player": 1,
|
||||||
|
"team" : 0,
|
||||||
|
"total_locations": 10
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"player": 2,
|
||||||
|
"team" : 0,
|
||||||
|
"total_locations": 20
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### `/slot_data_tracker/<suuid:tracker>`
|
||||||
|
<a name=slotdatatracker></a>
|
||||||
|
Will provide a list of each player's slot_data.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
```json
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"player": 1,
|
||||||
|
"slot_data": {
|
||||||
|
"example_option": 1,
|
||||||
|
"other_option": 3
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"player": 2,
|
||||||
|
"slot_data": {
|
||||||
|
"example_option": 1,
|
||||||
|
"other_option": 2
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
||||||
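As a consumption sketch (not part of the API description itself): fetching the new tracker endpoint with the third-party `requests` package. The base URL, the `/api` prefix, and the tracker id are assumptions; substitute the values for your own WebHost.

```python
# Hedged sketch of calling the tracker endpoint documented above.
import requests

BASE_URL = "https://archipelago.gg/api"  # assumption: API prefix of the WebHost
TRACKER_ID = "wyQkFxYRRFOxBK2R5SMyiQ"    # hypothetical tracker id

data = requests.get(f"{BASE_URL}/tracker/{TRACKER_ID}", timeout=10).json()
for checks in data["player_checks_done"]:
    print(f"Team {checks['team']} player {checks['player']}: "
          f"{len(checks['locations'])} locations checked")
```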
## User Endpoints
|
## User Endpoints
|
||||||
User endpoints can get room and seed details from the current session tokens (cookies)
|
User endpoints can get room and seed details from the current session tokens (cookies)
|
||||||
|
|
||||||
|
|||||||
@@ -76,8 +76,8 @@ webhost:
|
|||||||
* `game_info_languages` (optional) list of strings for defining the existing game info pages your game supports. The
|
* `game_info_languages` (optional) list of strings for defining the existing game info pages your game supports. The
|
||||||
documents must be prefixed with the same string as defined here. Default already has 'en'.
|
documents must be prefixed with the same string as defined here. Default already has 'en'.
|
||||||
|
|
||||||
* `options_presets` (optional) `Dict[str, Dict[str, Any]]` where the keys are the names of the presets and the values
|
* `options_presets` (optional) `dict[str, dict[str, Any]]` where the keys are the names of the presets and the values
|
||||||
are the options to be set for that preset. The options are defined as a `Dict[str, Any]` where the keys are the names
|
are the options to be set for that preset. The options are defined as a `dict[str, Any]` where the keys are the names
|
||||||
of the options and the values are the values to be set for that option. These presets will be available for users to
|
of the options and the values are the values to be set for that option. These presets will be available for users to
|
||||||
select from on the game's options page.
|
select from on the game's options page.
|
||||||
|
|
||||||
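Building on the `options_presets` bullet above, a hedged sketch of a `WebWorld` declaring two presets; the preset names and option names are made up for illustration.

```python
# Hedged sketch: declaring option presets on a world's WebWorld subclass.
# "difficulty" and "final_boss_hp" are hypothetical options of a hypothetical game.
from worlds.AutoWorld import WebWorld


class MyGameWeb(WebWorld):
    options_presets = {
        "Relaxed": {"difficulty": "easy", "final_boss_hp": 1000},
        "Challenge": {"difficulty": "hard", "final_boss_hp": 10000},
    }
```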
@@ -257,6 +257,14 @@ another flag like "progression", it means "an especially useful progression item
|
|||||||
combined with `progression`; see below)
|
combined with `progression`; see below)
|
||||||
* `progression_skip_balancing`: the combination of `progression` and `skip_balancing`, i.e., a progression item that
|
* `progression_skip_balancing`: the combination of `progression` and `skip_balancing`, i.e., a progression item that
|
||||||
will not be moved around by progression balancing; used, e.g., for currency or tokens, to not flood early spheres
|
will not be moved around by progression balancing; used, e.g., for currency or tokens, to not flood early spheres
|
||||||
|
* `deprioritized`: denotes that an item should not be placed on priority locations
|
||||||
|
(to be combined with `progression`; see below)
|
||||||
|
* `progression_deprioritized`: the combination of `progression` and `deprioritized`, i.e. a progression item that
|
||||||
|
should not be placed on priority locations, despite being progression;
|
||||||
|
like skip_balancing, this is commonly used for currency or tokens.
|
||||||
|
* `progression_deprioritized_skip_balancing`: the combination of `progression`, `deprioritized` and `skip_balancing`.
|
||||||
|
Since there is overlap between the kind of items that want `skip_balancing` and `deprioritized`,
|
||||||
|
this combined classification exists for convenience (see the sketch after this list).
|
||||||
|
|
||||||
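A minimal sketch of how the combined classification might be applied when a world creates a currency item; the item name and id are placeholders, and the flag itself is taken from the list above.

```python
# Hedged sketch: tagging a currency item with the combined classification described above.
from BaseClasses import Item, ItemClassification


def create_rupees(player: int) -> Item:
    # Progression (logic may require enough currency), but excluded from both
    # progression balancing and priority locations.
    return Item("100 Rupees",
                ItemClassification.progression_deprioritized_skip_balancing,
                code=0xC0FFEE,  # hypothetical item id
                player=player)
```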
### Regions
|
### Regions
|
||||||
|
|
||||||
@@ -745,7 +753,7 @@ from BaseClasses import CollectionState, MultiWorld
|
|||||||
from worlds.AutoWorld import LogicMixin
|
from worlds.AutoWorld import LogicMixin
|
||||||
|
|
||||||
class MyGameState(LogicMixin):
|
class MyGameState(LogicMixin):
|
||||||
mygame_defeatable_enemies: Dict[int, Set[str]] # per player
|
mygame_defeatable_enemies: dict[int, set[str]] # per player
|
||||||
|
|
||||||
def init_mixin(self, multiworld: MultiWorld) -> None:
|
def init_mixin(self, multiworld: MultiWorld) -> None:
|
||||||
# Initialize per player with the corresponding "nothing" value, such as 0 or an empty set.
|
# Initialize per player with the corresponding "nothing" value, such as 0 or an empty set.
|
||||||
@@ -874,11 +882,11 @@ item/location pairs is unnecessary since the AP server already retains and freel
|
|||||||
that request it. The most common usage of slot data is sending option results that the client needs to be aware of.
|
that request it. The most common usage of slot data is sending option results that the client needs to be aware of.
|
||||||
|
|
||||||
```python
|
```python
|
||||||
def fill_slot_data(self) -> Dict[str, Any]:
|
def fill_slot_data(self) -> dict[str, Any]:
|
||||||
# In order for our game client to handle the generated seed correctly we need to know what the user selected
|
# In order for our game client to handle the generated seed correctly we need to know what the user selected
|
||||||
# for their difficulty and final boss HP.
|
# for their difficulty and final boss HP.
|
||||||
# A dictionary returned from this method gets set as the slot_data and will be sent to the client after connecting.
|
# A dictionary returned from this method gets set as the slot_data and will be sent to the client after connecting.
|
||||||
# The options dataclass has a method to return a `Dict[str, Any]` of each option name provided and the relevant
|
# The options dataclass has a method to return a `dict[str, Any]` of each option name provided and the relevant
|
||||||
# option's value.
|
# option's value.
|
||||||
return self.options.as_dict("difficulty", "final_boss_hp")
|
return self.options.as_dict("difficulty", "final_boss_hp")
|
||||||
```
|
```
|
||||||
|
|||||||
@@ -74,13 +74,12 @@ class EntranceLookup:
|
|||||||
if entrance in self._expands_graph_cache:
|
if entrance in self._expands_graph_cache:
|
||||||
return self._expands_graph_cache[entrance]
|
return self._expands_graph_cache[entrance]
|
||||||
|
|
||||||
visited = set()
|
seen = {entrance.connected_region}
|
||||||
q: deque[Region] = deque()
|
q: deque[Region] = deque()
|
||||||
q.append(entrance.connected_region)
|
q.append(entrance.connected_region)
|
||||||
|
|
||||||
while q:
|
while q:
|
||||||
region = q.popleft()
|
region = q.popleft()
|
||||||
visited.add(region)
|
|
||||||
|
|
||||||
# check if the region itself is progression
|
# check if the region itself is progression
|
||||||
if region in region.multiworld.indirect_connections:
|
if region in region.multiworld.indirect_connections:
|
||||||
@@ -103,7 +102,8 @@ class EntranceLookup:
|
|||||||
and exit_ in self._usable_exits):
|
and exit_ in self._usable_exits):
|
||||||
self._expands_graph_cache[entrance] = True
|
self._expands_graph_cache[entrance] = True
|
||||||
return True
|
return True
|
||||||
elif exit_.connected_region and exit_.connected_region not in visited:
|
elif exit_.connected_region and exit_.connected_region not in seen:
|
||||||
|
seen.add(exit_.connected_region)
|
||||||
q.append(exit_.connected_region)
|
q.append(exit_.connected_region)
|
||||||
|
|
||||||
self._expands_graph_cache[entrance] = False
|
self._expands_graph_cache[entrance] = False
|
||||||
|
|||||||
@@ -180,8 +180,8 @@ Root: HKCR; Subkey: "{#MyAppName}mm2patch\shell\open\command"; ValueData: """{a
|
|||||||
|
|
||||||
Root: HKCR; Subkey: ".apladx"; ValueData: "{#MyAppName}ladxpatch"; Flags: uninsdeletevalue; ValueType: string; ValueName: "";
|
Root: HKCR; Subkey: ".apladx"; ValueData: "{#MyAppName}ladxpatch"; Flags: uninsdeletevalue; ValueType: string; ValueName: "";
|
||||||
Root: HKCR; Subkey: "{#MyAppName}ladxpatch"; ValueData: "Archipelago Links Awakening DX Patch"; Flags: uninsdeletekey; ValueType: string; ValueName: "";
|
Root: HKCR; Subkey: "{#MyAppName}ladxpatch"; ValueData: "Archipelago Links Awakening DX Patch"; Flags: uninsdeletekey; ValueType: string; ValueName: "";
|
||||||
Root: HKCR; Subkey: "{#MyAppName}ladxpatch\DefaultIcon"; ValueData: "{app}\ArchipelagoLinksAwakeningClient.exe,0"; ValueType: string; ValueName: "";
|
Root: HKCR; Subkey: "{#MyAppName}ladxpatch\DefaultIcon"; ValueData: "{app}\ArchipelagoLauncher.exe,0"; ValueType: string; ValueName: "";
|
||||||
Root: HKCR; Subkey: "{#MyAppName}ladxpatch\shell\open\command"; ValueData: """{app}\ArchipelagoLinksAwakeningClient.exe"" ""%1"""; ValueType: string; ValueName: "";
|
Root: HKCR; Subkey: "{#MyAppName}ladxpatch\shell\open\command"; ValueData: """{app}\ArchipelagoLauncher.exe"" ""%1"""; ValueType: string; ValueName: "";
|
||||||
|
|
||||||
Root: HKCR; Subkey: ".aptloz"; ValueData: "{#MyAppName}tlozpatch"; Flags: uninsdeletevalue; ValueType: string; ValueName: "";
|
Root: HKCR; Subkey: ".aptloz"; ValueData: "{#MyAppName}tlozpatch"; Flags: uninsdeletevalue; ValueType: string; ValueName: "";
|
||||||
Root: HKCR; Subkey: "{#MyAppName}tlozpatch"; ValueData: "Archipelago The Legend of Zelda Patch"; Flags: uninsdeletekey; ValueType: string; ValueName: "";
|
Root: HKCR; Subkey: "{#MyAppName}tlozpatch"; ValueData: "Archipelago The Legend of Zelda Patch"; Flags: uninsdeletekey; ValueType: string; ValueName: "";
|
||||||
|
|||||||
25
kvui.py
@@ -34,6 +34,17 @@ from kivy.config import Config
|
|||||||
Config.set("input", "mouse", "mouse,disable_multitouch")
|
Config.set("input", "mouse", "mouse,disable_multitouch")
|
||||||
Config.set("kivy", "exit_on_escape", "0")
|
Config.set("kivy", "exit_on_escape", "0")
|
||||||
Config.set("graphics", "multisamples", "0") # multisamples crash old intel drivers
|
Config.set("graphics", "multisamples", "0") # multisamples crash old intel drivers
|
||||||
|
|
||||||
|
# Workaround for an issue where importing kivy.core.window before loading sounds
|
||||||
|
# will hang the whole application on Linux once the first sound is loaded.
|
||||||
|
# kivymd imports kivy.core.window, so we have to do this before the first kivymd import.
|
||||||
|
# No longer necessary when we switch to kivy 3.0.0, which fixes this issue.
|
||||||
|
from kivy.core.audio import SoundLoader
|
||||||
|
for classobj in SoundLoader._classes:
|
||||||
|
# The least invasive way to force a SoundLoader class to load its audio engine seems to be calling
|
||||||
|
# .extensions(), which e.g. in audio_sdl2.pyx then calls a function called "mix_init()"
|
||||||
|
classobj.extensions()
|
||||||
|
|
||||||
from kivymd.uix.divider import MDDivider
|
from kivymd.uix.divider import MDDivider
|
||||||
from kivy.core.window import Window
|
from kivy.core.window import Window
|
||||||
from kivy.core.clipboard import Clipboard
|
from kivy.core.clipboard import Clipboard
|
||||||
@@ -720,13 +731,11 @@ class MessageBoxLabel(MDLabel):
|
|||||||
|
|
||||||
|
|
||||||
class MessageBox(Popup):
|
class MessageBox(Popup):
|
||||||
|
|
||||||
def __init__(self, title, text, error=False, **kwargs):
|
def __init__(self, title, text, error=False, **kwargs):
|
||||||
label = MessageBoxLabel(text=text)
|
label = MessageBoxLabel(text=text, padding=("6dp", "0dp"))
|
||||||
separator_color = [217 / 255, 129 / 255, 122 / 255, 1.] if error else [47 / 255., 167 / 255., 212 / 255, 1.]
|
separator_color = [217 / 255, 129 / 255, 122 / 255, 1.] if error else [47 / 255., 167 / 255., 212 / 255, 1.]
|
||||||
super().__init__(title=title, content=label, size_hint=(0.5, None), width=max(100, int(label.width) + 40),
|
super().__init__(title=title, content=label, size_hint=(0.5, None), width=max(100, int(label.width) + 40),
|
||||||
separator_color=separator_color, **kwargs)
|
separator_color=separator_color, **kwargs)
|
||||||
self.height += max(0, label.height - 18)
|
|
||||||
|
|
||||||
|
|
||||||
class MDNavigationItemBase(MDNavigationItem):
|
class MDNavigationItemBase(MDNavigationItem):
|
||||||
@@ -840,15 +849,15 @@ class GameManager(ThemedApp):
|
|||||||
self.log_panels: typing.Dict[str, Widget] = {}
|
self.log_panels: typing.Dict[str, Widget] = {}
|
||||||
|
|
||||||
# keep track of last used command to autofill on click
|
# keep track of last used command to autofill on click
|
||||||
self.last_autofillable_command = "hint"
|
self.last_autofillable_command = "!hint"
|
||||||
autofillable_commands = ("hint_location", "hint", "getitem")
|
autofillable_commands = ("!hint_location", "!hint", "!getitem")
|
||||||
original_say = ctx.on_user_say
|
original_say = ctx.on_user_say
|
||||||
|
|
||||||
def intercept_say(text):
|
def intercept_say(text):
|
||||||
text = original_say(text)
|
text = original_say(text)
|
||||||
if text:
|
if text:
|
||||||
for command in autofillable_commands:
|
for command in autofillable_commands:
|
||||||
if text.startswith("!" + command):
|
if text.startswith(command):
|
||||||
self.last_autofillable_command = command
|
self.last_autofillable_command = command
|
||||||
break
|
break
|
||||||
return text
|
return text
|
||||||
@@ -1101,10 +1110,6 @@ class GameManager(ThemedApp):
|
|||||||
hints = self.ctx.stored_data.get(f"_read_hints_{self.ctx.team}_{self.ctx.slot}", [])
|
hints = self.ctx.stored_data.get(f"_read_hints_{self.ctx.team}_{self.ctx.slot}", [])
|
||||||
self.hint_log.refresh_hints(hints)
|
self.hint_log.refresh_hints(hints)
|
||||||
|
|
||||||
# default F1 keybind, opens a settings menu, that seems to break the layout engine once closed
|
|
||||||
def open_settings(self, *largs):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class LogtoUI(logging.Handler):
|
class LogtoUI(logging.Handler):
|
||||||
def __init__(self, on_log):
|
def __init__(self, on_log):
|
||||||
|
|||||||
16
ruff.toml
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
line-length = 120
|
||||||
|
indent-width = 4
|
||||||
|
target-version = "py311"
|
||||||
|
|
||||||
|
[lint]
|
||||||
|
select = ["B", "C", "E", "F", "W", "I", "N", "Q", "UP", "RET", "RSE", "RUF", "ISC", "PLC", "PLE", "PLW", "T20", "PERF"]
|
||||||
|
ignore = [
|
||||||
|
"B011", # In AP, the use of assert False is essential because we optimise out these statements for release builds.
|
||||||
|
"C901", # Author disagrees with limiting branch complexity
|
||||||
|
"N818", # Author agrees with this rule, but Core AP violates this and changing it would be a hassle.
|
||||||
|
"PLC0415", # In AP, we consider local imports totally fine & necessary
|
||||||
|
"PLC1802", # Author agrees with this rule, but it literally changes the functionality of the code, which is unsafe.
|
||||||
|
"PLC1901", # This is just not equivalent
|
||||||
|
"PLE1141", # Gives false positives when the dict keys are tuples, but does not mention this in the suggested fix.
|
||||||
|
"UP015", # Explicit is better than implicit, so we'd prefer to keep "r" in open() calls.
|
||||||
|
]
|
||||||
12
settings.py
@@ -579,6 +579,17 @@ class ServerOptions(Group):
|
|||||||
"goal" -> Client can ask for remaining items after goal completion
|
"goal" -> Client can ask for remaining items after goal completion
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
class CountdownMode(str):
|
||||||
|
"""
|
||||||
|
Countdown modes
|
||||||
|
Determines whether or not a player can initiate a countdown with !countdown
|
||||||
|
Note that /countdown is always available to the host.
|
||||||
|
|
||||||
|
"enabled" -> Client can always initiate a countdown with !countdown.
|
||||||
|
"disabled" -> Client can never initiate a countdown with !countdown.
|
||||||
|
"auto" -> !countdown will be available for any room with less than 30 slots.
|
||||||
|
"""
|
||||||
|
|
||||||
class AutoShutdown(int):
|
class AutoShutdown(int):
|
||||||
"""Automatically shut down the server after this many seconds without new location checks, 0 to keep running"""
|
"""Automatically shut down the server after this many seconds without new location checks, 0 to keep running"""
|
||||||
|
|
||||||
@@ -613,6 +624,7 @@ class ServerOptions(Group):
|
|||||||
release_mode: ReleaseMode = ReleaseMode("auto")
|
release_mode: ReleaseMode = ReleaseMode("auto")
|
||||||
collect_mode: CollectMode = CollectMode("auto")
|
collect_mode: CollectMode = CollectMode("auto")
|
||||||
remaining_mode: RemainingMode = RemainingMode("goal")
|
remaining_mode: RemainingMode = RemainingMode("goal")
|
||||||
|
countdown_mode: CountdownMode = CountdownMode("auto")
|
||||||
auto_shutdown: AutoShutdown = AutoShutdown(0)
|
auto_shutdown: AutoShutdown = AutoShutdown(0)
|
||||||
compatibility: Compatibility = Compatibility(2)
|
compatibility: Compatibility = Compatibility(2)
|
||||||
log_network: LogNetwork = LogNetwork(0)
|
log_network: LogNetwork = LogNetwork(0)
|
||||||
|
|||||||
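For context on how the new `countdown_mode` value would be read at runtime, a hedged sketch; the lookup path mirrors the item-access style used for `tloz_options` earlier in this commit and is an assumption, not a documented API.

```python
# Hedged sketch: reading the new countdown_mode host setting.
from settings import get_settings

countdown_mode = get_settings()["server_options"]["countdown_mode"]
if countdown_mode == "disabled":
    print("Players cannot start a countdown with !countdown in this room.")
```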
44
setup.py
@@ -22,7 +22,7 @@ SNI_VERSION = "v0.0.100" # change back to "latest" once tray icon issues are fi
|
|||||||
|
|
||||||
|
|
||||||
# This is a bit jank. We need cx-Freeze to be able to run anything from this script, so install it
|
# This is a bit jank. We need cx-Freeze to be able to run anything from this script, so install it
|
||||||
requirement = 'cx-Freeze==8.0.0'
|
requirement = 'cx-Freeze==8.4.0'
|
||||||
try:
|
try:
|
||||||
import pkg_resources
|
import pkg_resources
|
||||||
try:
|
try:
|
||||||
@@ -30,7 +30,7 @@ try:
|
|||||||
install_cx_freeze = False
|
install_cx_freeze = False
|
||||||
except pkg_resources.ResolutionError:
|
except pkg_resources.ResolutionError:
|
||||||
install_cx_freeze = True
|
install_cx_freeze = True
|
||||||
except ImportError:
|
except (AttributeError, ImportError):
|
||||||
install_cx_freeze = True
|
install_cx_freeze = True
|
||||||
pkg_resources = None # type: ignore[assignment]
|
pkg_resources = None # type: ignore[assignment]
|
||||||
|
|
||||||
@@ -65,7 +65,6 @@ from Cython.Build import cythonize
|
|||||||
non_apworlds: set[str] = {
|
non_apworlds: set[str] = {
|
||||||
"A Link to the Past",
|
"A Link to the Past",
|
||||||
"Adventure",
|
"Adventure",
|
||||||
"ArchipIDLE",
|
|
||||||
"Archipelago",
|
"Archipelago",
|
||||||
"Lufia II Ancient Cave",
|
"Lufia II Ancient Cave",
|
||||||
"Meritous",
|
"Meritous",
|
||||||
@@ -147,7 +146,16 @@ def download_SNI() -> None:
|
|||||||
|
|
||||||
signtool: str | None = None
|
signtool: str | None = None
|
||||||
try:
|
try:
|
||||||
with urllib.request.urlopen('http://192.168.206.4:12345/connector/status') as response:
|
import socket
|
||||||
|
|
||||||
|
sign_host, sign_port = "192.168.206.4", 12345
|
||||||
|
# check if the sign_host is on a local network
|
||||||
|
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
|
||||||
|
s.connect((sign_host, sign_port))
|
||||||
|
if s.getsockname()[0].rsplit(".", 1)[0] != sign_host.rsplit(".", 1)[0]:
|
||||||
|
raise ConnectionError() # would go through default route
|
||||||
|
# configure signtool
|
||||||
|
with urllib.request.urlopen(f"http://{sign_host}:{sign_port}/connector/status") as response:
|
||||||
html = response.read()
|
html = response.read()
|
||||||
if b"status=OK\n" in html:
|
if b"status=OK\n" in html:
|
||||||
signtool = (r'signtool sign /sha1 6df76fe776b82869a5693ddcb1b04589cffa6faf /fd sha256 /td sha256 '
|
signtool = (r'signtool sign /sha1 6df76fe776b82869a5693ddcb1b04589cffa6faf /fd sha256 /td sha256 '
|
||||||
@@ -372,6 +380,7 @@ class BuildExeCommand(cx_Freeze.command.build_exe.build_exe):
|
|||||||
os.makedirs(self.buildfolder / "Players" / "Templates", exist_ok=True)
|
os.makedirs(self.buildfolder / "Players" / "Templates", exist_ok=True)
|
||||||
from Options import generate_yaml_templates
|
from Options import generate_yaml_templates
|
||||||
from worlds.AutoWorld import AutoWorldRegister
|
from worlds.AutoWorld import AutoWorldRegister
|
||||||
|
from worlds.Files import APWorldContainer
|
||||||
assert not non_apworlds - set(AutoWorldRegister.world_types), \
|
assert not non_apworlds - set(AutoWorldRegister.world_types), \
|
||||||
f"Unknown world {non_apworlds - set(AutoWorldRegister.world_types)} designated for .apworld"
|
f"Unknown world {non_apworlds - set(AutoWorldRegister.world_types)} designated for .apworld"
|
||||||
folders_to_remove: list[str] = []
|
folders_to_remove: list[str] = []
|
||||||
@@ -380,13 +389,36 @@ class BuildExeCommand(cx_Freeze.command.build_exe.build_exe):
|
|||||||
if worldname not in non_apworlds:
|
if worldname not in non_apworlds:
|
||||||
file_name = os.path.split(os.path.dirname(worldtype.__file__))[1]
|
file_name = os.path.split(os.path.dirname(worldtype.__file__))[1]
|
||||||
world_directory = self.libfolder / "worlds" / file_name
|
world_directory = self.libfolder / "worlds" / file_name
|
||||||
|
if os.path.isfile(world_directory / "archipelago.json"):
|
||||||
|
with open(os.path.join(world_directory, "archipelago.json"), mode="r", encoding="utf-8") as manifest_file:
|
||||||
|
manifest = json.load(manifest_file)
|
||||||
|
|
||||||
|
assert "game" in manifest, (
|
||||||
|
f"World directory {world_directory} has an archipelago.json manifest file, but it"
|
||||||
|
"does not define a \"game\"."
|
||||||
|
)
|
||||||
|
assert manifest["game"] == worldtype.game, (
|
||||||
|
f"World directory {world_directory} has an archipelago.json manifest file, but value of the"
|
||||||
|
f"\"game\" field ({manifest['game']} does not equal the World class's game ({worldtype.game})."
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
manifest = {}
|
||||||
# this method creates an apworld that cannot be moved to a different OS or minor python version,
|
# this method creates an apworld that cannot be moved to a different OS or minor python version,
|
||||||
# which should be ok
|
# which should be ok
|
||||||
with zipfile.ZipFile(self.libfolder / "worlds" / (file_name + ".apworld"), "x", zipfile.ZIP_DEFLATED,
|
zip_path = self.libfolder / "worlds" / (file_name + ".apworld")
|
||||||
|
apworld = APWorldContainer(str(zip_path))
|
||||||
|
apworld.minimum_ap_version = version_tuple
|
||||||
|
apworld.maximum_ap_version = version_tuple
|
||||||
|
apworld.game = worldtype.game
|
||||||
|
manifest.update(apworld.get_manifest())
|
||||||
|
apworld.manifest_path = f"{file_name}/archipelago.json"
|
||||||
|
with zipfile.ZipFile(zip_path, "x", zipfile.ZIP_DEFLATED,
|
||||||
compresslevel=9) as zf:
|
compresslevel=9) as zf:
|
||||||
for path in world_directory.rglob("*.*"):
|
for path in world_directory.rglob("*.*"):
|
||||||
relative_path = os.path.join(*path.parts[path.parts.index("worlds")+1:])
|
relative_path = os.path.join(*path.parts[path.parts.index("worlds")+1:])
|
||||||
zf.write(path, relative_path)
|
if not relative_path.endswith("archipelago.json"):
|
||||||
|
zf.write(path, relative_path)
|
||||||
|
zf.writestr(apworld.manifest_path, json.dumps(manifest))
|
||||||
folders_to_remove.append(file_name)
|
folders_to_remove.append(file_name)
|
||||||
shutil.rmtree(world_directory)
|
shutil.rmtree(world_directory)
|
||||||
shutil.copyfile("meta.yaml", self.buildfolder / "Players" / "Templates" / "meta.yaml")
|
shutil.copyfile("meta.yaml", self.buildfolder / "Players" / "Templates" / "meta.yaml")
|
||||||
|
|||||||
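Editor's note on the local-network check added above: connecting a UDP socket sends no packets, but it does make the kernel choose an outgoing interface, so getsockname() reveals which local address would be used to reach the signing host. A self-contained sketch of the same idea (the host and port below are documentation placeholders, not the build infrastructure):

import socket

def uses_same_subnet(host: str, port: int) -> bool:
    """Return True if the route to host leaves through an interface whose first three octets match.

    Connecting a UDP socket is side-effect free: no datagram is sent, but the
    kernel selects a source address, which getsockname() then exposes.
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        s.connect((host, port))
        local_ip = s.getsockname()[0]
    finally:
        s.close()
    return local_ip.rsplit(".", 1)[0] == host.rsplit(".", 1)[0]

print(uses_same_subnet("192.0.2.10", 12345))  # placeholder TEST-NET address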
@@ -1,3 +0,0 @@
-from .bases import TestBase, WorldTestBase
-from warnings import warn
-warn("TestBase was renamed to bases", DeprecationWarning)

@@ -9,98 +9,7 @@ from test.general import gen_steps
 from worlds import AutoWorld
 from worlds.AutoWorld import World, call_all

-from BaseClasses import Location, MultiWorld, CollectionState, ItemClassification, Item
+from BaseClasses import Location, MultiWorld, CollectionState, Item
-from worlds.alttp.Items import item_factory
-
-
-class TestBase(unittest.TestCase):
-    multiworld: MultiWorld
-    _state_cache = {}
-
-    def get_state(self, items):
-        if (self.multiworld, tuple(items)) in self._state_cache:
-            return self._state_cache[self.multiworld, tuple(items)]
-        state = CollectionState(self.multiworld)
-        for item in items:
-            item.classification = ItemClassification.progression
-            state.collect(item, prevent_sweep=True)
-        state.sweep_for_advancements()
-        state.update_reachable_regions(1)
-        self._state_cache[self.multiworld, tuple(items)] = state
-        return state
-
-    def get_path(self, state, region):
-        def flist_to_iter(node):
-            while node:
-                value, node = node
-                yield value
-
-        from itertools import zip_longest
-        reversed_path_as_flist = state.path.get(region, (region, None))
-        string_path_flat = reversed(list(map(str, flist_to_iter(reversed_path_as_flist))))
-        # Now we combine the flat string list into (region, exit) pairs
-        pathsiter = iter(string_path_flat)
-        pathpairs = zip_longest(pathsiter, pathsiter)
-        return list(pathpairs)
-
-    def run_location_tests(self, access_pool):
-        for i, (location, access, *item_pool) in enumerate(access_pool):
-            items = item_pool[0]
-            all_except = item_pool[1] if len(item_pool) > 1 else None
-            state = self._get_items(item_pool, all_except)
-            path = self.get_path(state, self.multiworld.get_location(location, 1).parent_region)
-            with self.subTest(msg="Reach Location", location=location, access=access, items=items,
-                              all_except=all_except, path=path, entry=i):
-
-                self.assertEqual(self.multiworld.get_location(location, 1).can_reach(state), access,
-                                 f"failed {self.multiworld.get_location(location, 1)} with: {item_pool}")
-
-            # check for partial solution
-            if not all_except and access:  # we are not supposed to be able to reach location with partial inventory
-                for missing_item in item_pool[0]:
-                    with self.subTest(msg="Location reachable without required item", location=location,
-                                      items=item_pool[0], missing_item=missing_item, entry=i):
-                        state = self._get_items_partial(item_pool, missing_item)
-
-                        self.assertEqual(self.multiworld.get_location(location, 1).can_reach(state), False,
-                                         f"failed {self.multiworld.get_location(location, 1)}: succeeded with "
-                                         f"{missing_item} removed from: {item_pool}")
-
-    def run_entrance_tests(self, access_pool):
-        for i, (entrance, access, *item_pool) in enumerate(access_pool):
-            items = item_pool[0]
-            all_except = item_pool[1] if len(item_pool) > 1 else None
-            state = self._get_items(item_pool, all_except)
-            path = self.get_path(state, self.multiworld.get_entrance(entrance, 1).parent_region)
-            with self.subTest(msg="Reach Entrance", entrance=entrance, access=access, items=items,
-                              all_except=all_except, path=path, entry=i):
-
-                self.assertEqual(self.multiworld.get_entrance(entrance, 1).can_reach(state), access)
-
-            # check for partial solution
-            if not all_except and access:  # we are not supposed to be able to reach location with partial inventory
-                for missing_item in item_pool[0]:
-                    with self.subTest(msg="Entrance reachable without required item", entrance=entrance,
-                                      items=item_pool[0], missing_item=missing_item, entry=i):
-                        state = self._get_items_partial(item_pool, missing_item)
-                        self.assertEqual(self.multiworld.get_entrance(entrance, 1).can_reach(state), False,
-                                         f"failed {self.multiworld.get_entrance(entrance, 1)} with: {item_pool}")
-
-    def _get_items(self, item_pool, all_except):
-        if all_except and len(all_except) > 0:
-            items = self.multiworld.itempool[:]
-            items = [item for item in items if
-                     item.name not in all_except and not ("Bottle" in item.name and "AnyBottle" in all_except)]
-            items.extend(item_factory(item_pool[0], self.multiworld.worlds[1]))
-        else:
-            items = item_factory(item_pool[0], self.multiworld.worlds[1])
-        return self.get_state(items)
-
-    def _get_items_partial(self, item_pool, missing_item):
-        new_items = item_pool[0].copy()
-        new_items.remove(missing_item)
-        items = item_factory(new_items, self.multiworld.worlds[1])
-        return self.get_state(items)
-
-
 class WorldTestBase(unittest.TestCase):
0    test/benchmark/compression/__init__.py  (new file)
227  test/benchmark/compression/benchmark.py  (new file)

@@ -0,0 +1,227 @@
#!/usr/bin/env python

# based on python-websockets compression benchmark (c) Aymeric Augustin and contributors
# https://github.com/python-websockets/websockets/blob/main/experiments/compression/benchmark.py

import collections
import time
import zlib
from typing import Iterable


REPEAT = 10

WB, ML = 12, 5  # defaults used as a reference
WBITS = range(9, 16)
MEMLEVELS = range(1, 10)


def benchmark(data: Iterable[bytes]) -> None:
    size: dict[int, dict[int, float]] = collections.defaultdict(dict)
    duration: dict[int, dict[int, float]] = collections.defaultdict(dict)

    for wbits in WBITS:
        for memLevel in MEMLEVELS:
            encoder = zlib.compressobj(wbits=-wbits, memLevel=memLevel)
            encoded = []

            print(f"Compressing {REPEAT} times with {wbits=} and {memLevel=}")

            t0 = time.perf_counter()

            for _ in range(REPEAT):
                for item in data:
                    # Taken from PerMessageDeflate.encode
                    item = encoder.compress(item) + encoder.flush(zlib.Z_SYNC_FLUSH)
                    if item.endswith(b"\x00\x00\xff\xff"):
                        item = item[:-4]
                    encoded.append(item)

            t1 = time.perf_counter()

            size[wbits][memLevel] = sum(len(item) for item in encoded) / REPEAT
            duration[wbits][memLevel] = (t1 - t0) / REPEAT

    raw_size = sum(len(item) for item in data)

    print("=" * 79)
    print("Compression ratio")
    print("=" * 79)
    print("\t".join(["wb \\ ml"] + [str(memLevel) for memLevel in MEMLEVELS]))
    for wbits in WBITS:
        print(
            "\t".join(
                [str(wbits)]
                + [
                    f"{100 * (1 - size[wbits][memLevel] / raw_size):.1f}%"
                    for memLevel in MEMLEVELS
                ]
            )
        )
    print("=" * 79)
    print()

    print("=" * 79)
    print("CPU time")
    print("=" * 79)
    print("\t".join(["wb \\ ml"] + [str(memLevel) for memLevel in MEMLEVELS]))
    for wbits in WBITS:
        print(
            "\t".join(
                [str(wbits)]
                + [
                    f"{1000 * duration[wbits][memLevel]:.1f}ms"
                    for memLevel in MEMLEVELS
                ]
            )
        )
    print("=" * 79)
    print()

    print("=" * 79)
    print(f"Size vs. {WB} \\ {ML}")
    print("=" * 79)
    print("\t".join(["wb \\ ml"] + [str(memLevel) for memLevel in MEMLEVELS]))
    for wbits in WBITS:
        print(
            "\t".join(
                [str(wbits)]
                + [
                    f"{100 * (size[wbits][memLevel] / size[WB][ML] - 1):.1f}%"
                    for memLevel in MEMLEVELS
                ]
            )
        )
    print("=" * 79)
    print()

    print("=" * 79)
    print(f"Time vs. {WB} \\ {ML}")
    print("=" * 79)
    print("\t".join(["wb \\ ml"] + [str(memLevel) for memLevel in MEMLEVELS]))
    for wbits in WBITS:
        print(
            "\t".join(
                [str(wbits)]
                + [
                    f"{100 * (duration[wbits][memLevel] / duration[WB][ML] - 1):.1f}%"
                    for memLevel in MEMLEVELS
                ]
            )
        )
    print("=" * 79)
    print()


def generate_data_package_corpus() -> list[bytes]:
    # compared to default 12, 5:
    # 11, 4 saves 16K RAM, gives +4.6% size, -5.0% time .. +1.1% time
    # 10, 4 saves 20K RAM, gives +10.2% size, -3.8% time .. +0.6% time
    # 11, 3 saves 20K RAM, gives +6.5% size, +14.2% time
    # 10, 3 saves 24K RAM, gives +12.8% size, +0.5% time .. +6.9% time
    # NOTE: time delta is highly unstable; time is ~100ms
    import warnings

    with warnings.catch_warnings():
        warnings.simplefilter("ignore")

        from NetUtils import encode
        from worlds import network_data_package

    return [encode(network_data_package).encode("utf-8")]


def generate_solo_release_corpus() -> list[bytes]:
    # compared to default 12, 5:
    # 11, 4 saves 16K RAM, gives +0.9% size, +3.9% time
    # 10, 4 saves 20K RAM, gives +1.4% size, +3.4% time
    # 11, 3 saves 20K RAM, gives +1.8% size, +13.9% time
    # 10, 3 saves 24K RAM, gives +2.1% size, +4.8% time
    # NOTE: time delta is highly unstable; time is ~0.4ms

    from random import Random
    from MultiServer import json_format_send_event
    from NetUtils import encode, NetworkItem

    r = Random()
    r.seed(0)
    solo_release = []
    solo_release_locations = [r.randint(1000, 1999) for _ in range(200)]
    solo_release_items = sorted([r.randint(1000, 1999) for _ in range(200)])  # currently sorted by item
    solo_player = 1
    for location, item in zip(solo_release_locations, solo_release_items):
        flags = r.choice((0, 0, 0, 0, 0, 0, 0, 1, 2, 3))
        network_item = NetworkItem(item, location, solo_player, flags)
        solo_release.append(json_format_send_event(network_item, solo_player))
    solo_release.append({
        "cmd": "ReceivedItems",
        "index": 0,
        "items": solo_release_items,
    })
    solo_release.append({
        "cmd": "RoomUpdate",
        "hint_points": 200,
        "checked_locations": solo_release_locations,
    })
    return [encode(solo_release).encode("utf-8")]


def generate_gameplay_corpus() -> list[bytes]:
    # compared to default 12, 5:
    # 11, 4 saves 16K RAM, gives +13.6% size, +4.1% time
    # 10, 4 saves 20K RAM, gives +22.3% size, +2.2% time
    # 10, 3 saves 24K RAM, gives +26.2% size, +1.6% time
    # NOTE: time delta is highly unstable; time is 4ms

    from copy import copy
    from random import Random
    from MultiServer import json_format_send_event
    from NetUtils import encode, NetworkItem

    r = Random()
    r.seed(0)
    gameplay = []
    observer = 1
    hint_points = 0
    index = 0
    players = list(range(1, 10))
    player_locations = {player: [r.randint(1000, 1999) for _ in range(200)] for player in players}
    player_items = {player: [r.randint(1000, 1999) for _ in range(200)] for player in players}
    player_receiver = {player: [r.randint(1, len(players)) for _ in range(200)] for player in players}
    for i in range(0, len(player_locations[1])):
        player_sequence = copy(players)
        r.shuffle(player_sequence)
        for finder in player_sequence:
            flags = r.choice((0, 0, 0, 0, 0, 0, 0, 1, 2, 3))
            receiver = player_receiver[finder][i]
            item = player_items[finder][i]
            location = player_locations[finder][i]
            network_item = NetworkItem(item, location, receiver, flags)
            gameplay.append(json_format_send_event(network_item, observer))
            if finder == observer:
                hint_points += 1
                gameplay.append({
                    "cmd": "RoomUpdate",
                    "hint_points": hint_points,
                    "checked_locations": [location],
                })
            if receiver == observer:
                gameplay.append({
                    "cmd": "ReceivedItems",
                    "index": index,
                    "items": [item],
                })
                index += 1
    return [encode(gameplay).encode("utf-8")]


def main() -> None:
    #corpus = generate_data_package_corpus()
    #corpus = generate_solo_release_corpus()
    #corpus = generate_gameplay_corpus()
    corpus = generate_data_package_corpus() + generate_solo_release_corpus() + generate_gameplay_corpus()
    benchmark(corpus)
    print(f"raw size: {sum(len(data) for data in corpus)}")


if __name__ == "__main__":
    main()
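Editor's note on the "saves 16K/20K/24K RAM" comments in the corpus generators above: the zlib manual gives deflate's working memory as roughly (1 << (windowBits + 2)) + (1 << (memLevel + 9)) bytes per compressor, so lowering wbits/memLevel from the 12/5 reference shrinks each connection's compressor state. A small sketch that reproduces those numbers (formula from the zlib documentation; it excludes any per-connection overhead of the websocket library itself):

def deflate_mem(wbits: int, mem_level: int) -> int:
    # approximate deflate state size in bytes, per the zlib manual
    return (1 << (wbits + 2)) + (1 << (mem_level + 9))

reference = deflate_mem(12, 5)  # 32768 bytes
for wb, ml in ((11, 4), (10, 4), (11, 3), (10, 3)):
    print(f"{wb}, {ml}: saves {(reference - deflate_mem(wb, ml)) // 1024}K")
# prints 16K, 20K, 20K and 24K, matching the comments above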
@@ -1,4 +1,12 @@
-def run_locations_benchmark():
+def run_locations_benchmark(freeze_gc: bool = True) -> None:
+    """
+    Run a benchmark of location access rule performance against an empty_state and an all_state.
+
+    :param freeze_gc: Whether to freeze gc before benchmarking and unfreeze gc afterward. Freezing gc moves all objects
+        tracked by the garbage collector to a permanent generation, ignoring them in all future collections. Freezing
+        greatly reduces the duration of running gc.collect() within benchmarks, which otherwise often takes much longer
+        than running all iterations for the location rule being benchmarked.
+    """
     import argparse
     import logging
     import gc

@@ -34,6 +42,8 @@ def run_locations_benchmark():
             return "\n".join(f"  {time:.4f} in {name}" for name, time in counter.most_common(top))

         def location_test(self, test_location: Location, state: CollectionState, state_name: str) -> float:
+            if freeze_gc:
+                gc.freeze()
             with TimeIt(f"{test_location.game} {self.rule_iterations} "
                         f"runs of {test_location}.access_rule({state_name})", logger) as t:
                 for _ in range(self.rule_iterations):

@@ -41,6 +51,8 @@ def run_locations_benchmark():
                 # if time is taken to disentangle complex ref chains,
                 # this time should be attributed to the rule.
                 gc.collect()
+            if freeze_gc:
+                gc.unfreeze()
             return t.dif

         def main(self):

@@ -64,9 +76,13 @@ def run_locations_benchmark():

             gc.collect()
             for step in self.gen_steps:
+                if freeze_gc:
+                    gc.freeze()
                 with TimeIt(f"{game} step {step}", logger):
                     call_all(multiworld, step)
                 gc.collect()
+                if freeze_gc:
+                    gc.unfreeze()

             locations = sorted(multiworld.get_unfilled_locations())
             if not locations:
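Editor's note: the freeze_gc handling above repeats the same paired if/gc.freeze() and if/gc.unfreeze() blocks around every timed region. A possible helper (not part of the diff) that expresses the same pattern as a context manager:

import gc
from contextlib import contextmanager
from typing import Iterator

@contextmanager
def frozen_gc(enabled: bool = True) -> Iterator[None]:
    # gc.freeze() moves all currently tracked objects to a permanent generation,
    # so gc.collect() inside the block only scans objects created afterwards.
    if enabled:
        gc.freeze()
    try:
        yield
    finally:
        if enabled:
            gc.unfreeze()

Used as "with frozen_gc(freeze_gc): ...", it would replace each freeze/unfreeze pair around the timed blocks.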
@@ -1,9 +1,9 @@
 from argparse import Namespace
-from typing import List, Optional, Tuple, Type, Union
+from typing import Any, List, Optional, Tuple, Type

 from BaseClasses import CollectionState, Item, ItemClassification, Location, MultiWorld, Region
 from worlds import network_data_package
-from worlds.AutoWorld import World, call_all
+from worlds.AutoWorld import World, WebWorld, call_all

 gen_steps = (
     "generate_early",

@@ -17,7 +17,7 @@ gen_steps = (


 def setup_solo_multiworld(
     world_type: Type[World], steps: Tuple[str, ...] = gen_steps, seed: Optional[int] = None
 ) -> MultiWorld:
     """
     Creates a multiworld with a single player of `world_type`, sets default options, and calls provided gen steps.

@@ -31,8 +31,8 @@ def setup_solo_multiworld(
     return setup_multiworld(world_type, steps, seed)


-def setup_multiworld(worlds: Union[List[Type[World]], Type[World]], steps: Tuple[str, ...] = gen_steps,
-                     seed: Optional[int] = None) -> MultiWorld:
+def setup_multiworld(worlds: list[type[World]] | type[World], steps: tuple[str, ...] = gen_steps,
+                     seed: int | None = None, options: dict[str, Any] | list[dict[str, Any]] = None) -> MultiWorld:
     """
     Creates a multiworld with a player for each provided world type, allowing duplicates, setting default options, and
     calling the provided gen steps.

@@ -40,20 +40,27 @@ def setup_multiworld(worlds: Union[List[Type[World]], Type[World]], steps: Tuple
     :param worlds: Type/s of worlds to generate a multiworld for
     :param steps: Gen steps that should be called before returning. Default calls through pre_fill
     :param seed: The seed to be used when creating this multiworld
+    :param options: Options to set on each world. If just one dict of options is passed, it will be used for all worlds.
     :return: The generated multiworld
     """
     if not isinstance(worlds, list):
         worlds = [worlds]
+
+    if options is None:
+        options = [{}] * len(worlds)
+    elif not isinstance(options, list):
+        options = [options] * len(worlds)
+
     players = len(worlds)
     multiworld = MultiWorld(players)
     multiworld.game = {player: world_type.game for player, world_type in enumerate(worlds, 1)}
     multiworld.player_name = {player: f"Tester{player}" for player in multiworld.player_ids}
     multiworld.set_seed(seed)
     args = Namespace()
-    for player, world_type in enumerate(worlds, 1):
+    for player, (world_type, option_overrides) in enumerate(zip(worlds, options), 1):
         for key, option in world_type.options_dataclass.type_hints.items():
             updated_options = getattr(args, key, {})
-            updated_options[player] = option.from_any(option.default)
+            updated_options[player] = option.from_any(option_overrides.get(key, option.default))
             setattr(args, key, updated_options)
     multiworld.set_options(args)
     multiworld.state = CollectionState(multiworld)

@@ -62,11 +69,16 @@ def setup_multiworld(worlds: Union[List[Type[World]], Type[World]], steps: Tuple
     return multiworld


+class TestWebWorld(WebWorld):
+    tutorials = []
+
+
 class TestWorld(World):
     game = f"Test Game"
     item_name_to_id = {}
     location_name_to_id = {}
     hidden = True
+    web = TestWebWorld()


 # add our test world to the data package, so we can test it later
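Editor's note: a usage sketch for the new options parameter of setup_multiworld. MyWorld is a stand-in for any World subclass under test, and progression_balancing is used only as an example of a per-game common option key; substitute whatever option the world actually defines.

# single world, one override applied to that world
multiworld = setup_multiworld(MyWorld, options={"progression_balancing": 0})

# two worlds with per-world overrides (the list is matched to the worlds by position)
multiworld = setup_multiworld(
    [MyWorld, MyWorld],
    options=[{"progression_balancing": 0}, {}],
)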
@@ -6,9 +6,9 @@ from Utils import get_intended_text, get_input_text_from_response
 class TestClient(unittest.TestCase):
     def test_autofill_hint_from_fuzzy_hint(self) -> None:
         tests = (
             ("item", ["item1", "item2"]),  # Multiple close matches
             ("itm", ["item1", "item21"]),  # No close match, multiple option
             ("item", ["item1"]),  # No close match, single option
             ("item", ["\"item\" 'item' (item)"]),  # Testing different special characters
         )

@@ -16,7 +16,7 @@ class TestClient(unittest.TestCase):
             item_name, usable, response = get_intended_text(input_text, possible_answers)
             self.assertFalse(usable, "This test must be updated, it seems get_fuzzy_results behavior changed")

-            hint_command = get_input_text_from_response(response, "hint")
+            hint_command = get_input_text_from_response(response, "!hint")
             self.assertIsNotNone(hint_command,
                                  "The response to fuzzy hints is no longer recognized by the hint autofill")
             self.assertEqual(hint_command, f"!hint {item_name}",
@@ -37,10 +37,11 @@ class TestImplemented(unittest.TestCase):

     def test_slot_data(self):
         """Tests that if a world creates slot data, it's json serializable."""
-        for game_name, world_type in AutoWorldRegister.world_types.items():
-            # has an await for generate_output which isn't being called
-            if game_name in {"Ocarina of Time"}:
-                continue
+        # has an await for generate_output which isn't being called
+        excluded_games = ("Ocarina of Time",)
+        worlds_to_test = {game: world
+                          for game, world in AutoWorldRegister.world_types.items() if game not in excluded_games}
+        for game_name, world_type in worlds_to_test.items():
             multiworld = setup_solo_multiworld(world_type)
             with self.subTest(game=game_name, seed=multiworld.seed):
                 distribute_items_restrictive(multiworld)

@@ -150,8 +150,7 @@ class TestBase(unittest.TestCase):
         """Test that worlds don't modify the locality of items after duplicates are resolved"""
         gen_steps = ("generate_early",)
         additional_steps = ("create_regions", "create_items", "set_rules", "connect_entrances", "generate_basic", "pre_fill")
-        worlds_to_test = {game: world for game, world in AutoWorldRegister.world_types.items()}
-        for game_name, world_type in worlds_to_test.items():
+        for game_name, world_type in AutoWorldRegister.world_types.items():
            with self.subTest("Game", game=game_name):
                multiworld = setup_solo_multiworld(world_type, gen_steps)
                local_items = multiworld.worlds[1].options.local_items.value.copy()
@@ -33,7 +33,10 @@ class TestBase(unittest.TestCase):
     def test_location_creation_steps(self):
         """Tests that Regions and Locations aren't created after `create_items`."""
         gen_steps = ("generate_early", "create_regions", "create_items")
-        for game_name, world_type in AutoWorldRegister.world_types.items():
+        excluded_games = ("Ocarina of Time", "Pokemon Red and Blue")
+        worlds_to_test = {game: world
+                          for game, world in AutoWorldRegister.world_types.items() if game not in excluded_games}
+        for game_name, world_type in worlds_to_test.items():
             with self.subTest("Game", game_name=game_name):
                 multiworld = setup_solo_multiworld(world_type, gen_steps)
                 region_count = len(multiworld.get_regions())

@@ -54,13 +57,13 @@ class TestBase(unittest.TestCase):
                 call_all(multiworld, "generate_basic")
                 self.assertEqual(region_count, len(multiworld.get_regions()),
                                  f"{game_name} modified region count during generate_basic")
-                self.assertGreaterEqual(location_count, len(multiworld.get_locations()),
+                self.assertEqual(location_count, len(multiworld.get_locations()),
                                  f"{game_name} modified locations count during generate_basic")

                 call_all(multiworld, "pre_fill")
                 self.assertEqual(region_count, len(multiworld.get_regions()),
                                  f"{game_name} modified region count during pre_fill")
-                self.assertGreaterEqual(location_count, len(multiworld.get_locations()),
+                self.assertEqual(location_count, len(multiworld.get_locations()),
                                  f"{game_name} modified locations count during pre_fill")

     def test_location_group(self):
@@ -1,7 +1,7 @@
 import unittest

 from BaseClasses import PlandoOptions
-from Options import ItemLinks, Choice
+from Options import Choice, ItemLinks, PlandoConnections, PlandoItems, PlandoTexts
 from Utils import restricted_dumps
 from worlds.AutoWorld import AutoWorldRegister


@@ -72,8 +72,8 @@ class TestOptions(unittest.TestCase):
         for link in item_links.values():
             self.assertEqual(link.value[0], item_link_group[0])

-    def test_pickle_dumps(self):
-        """Test options can be pickled into database for WebHost generation"""
+    def test_pickle_dumps_default(self):
+        """Test that default option values can be pickled into database for WebHost generation"""
         for gamename, world_type in AutoWorldRegister.world_types.items():
             if not world_type.hidden:
                 for option_key, option in world_type.options_dataclass.type_hints.items():

@@ -81,3 +81,23 @@ class TestOptions(unittest.TestCase):
                     restricted_dumps(option.from_any(option.default))
                     if issubclass(option, Choice) and option.default in option.name_lookup:
                         restricted_dumps(option.from_text(option.name_lookup[option.default]))
+
+    def test_pickle_dumps_plando(self):
+        """Test that plando options using containers of a custom type can be pickled"""
+        # The base PlandoConnections class can't be instantiated directly, create a subclass and then cast it
+        class TestPlandoConnections(PlandoConnections):
+            entrances = {"An Entrance"}
+            exits = {"An Exit"}
+        plando_connection_value = PlandoConnections(
+            TestPlandoConnections.from_any([{"entrance": "An Entrance", "exit": "An Exit"}])
+        )
+
+        plando_values = {
+            "PlandoConnections": plando_connection_value,
+            "PlandoItems": PlandoItems.from_any([{"item": "Something", "location": "Somewhere"}]),
+            "PlandoTexts": PlandoTexts.from_any([{"text": "Some text.", "at": "text_box"}]),
+        }
+
+        for option_key, value in plando_values.items():
+            with self.subTest(option=option_key):
+                restricted_dumps(value)
102  test/general/test_world_manifest.py  (new file)

@@ -0,0 +1,102 @@
"""Check world sources' manifest files"""

import json
import unittest
from pathlib import Path
from typing import Any, ClassVar

import test
from Utils import home_path, local_path
from worlds.AutoWorld import AutoWorldRegister
from ..param import classvar_matrix


test_path = Path(test.__file__).parent
worlds_paths = [
    Path(local_path("worlds")),
    Path(local_path("custom_worlds")),
    Path(home_path("worlds")),
    Path(home_path("custom_worlds")),
]

# Only check source folders for now. Zip validation should probably be in the loader and/or installer.
source_world_names = [
    k
    for k, v in AutoWorldRegister.world_types.items()
    if not v.zip_path and not Path(v.__file__).is_relative_to(test_path)
]


def get_source_world_manifest_path(game: str) -> Path | None:
    """Get path of archipelago.json in the world's root folder from game name."""
    # TODO: add a feature to AutoWorld that makes this less annoying
    world_type = AutoWorldRegister.world_types[game]
    world_type_path = Path(world_type.__file__)
    for worlds_path in worlds_paths:
        if world_type_path.is_relative_to(worlds_path):
            world_root = worlds_path / world_type_path.relative_to(worlds_path).parents[0]
            manifest_path = world_root / "archipelago.json"
            return manifest_path if manifest_path.exists() else None
    assert False, f"{world_type_path} not found in any worlds path"


# TODO: remove the filter once manifests are mandatory.
@classvar_matrix(game=filter(get_source_world_manifest_path, source_world_names))
class TestWorldManifest(unittest.TestCase):
    game: ClassVar[str]
    manifest: ClassVar[dict[str, Any]]

    @classmethod
    def setUpClass(cls) -> None:
        world_type = AutoWorldRegister.world_types[cls.game]
        assert world_type.game == cls.game
        manifest_path = get_source_world_manifest_path(cls.game)
        assert manifest_path  # make mypy happy
        with manifest_path.open("r", encoding="utf-8") as f:
            cls.manifest = json.load(f)

    def test_game(self) -> None:
        """Test that 'game' will be correctly defined when generating APWorld manifest from source."""
        self.assertIn(
            "game",
            self.manifest,
            f"archipelago.json manifest exists for {self.game} but does not contain 'game'",
        )
        self.assertEqual(
            self.manifest["game"],
            self.game,
            f"archipelago.json manifest for {self.game} specifies wrong game '{self.manifest['game']}'",
        )

    def test_world_version(self) -> None:
        """Test that world_version matches the requirements in apworld specification.md"""
        if "world_version" in self.manifest:
            world_version: str = self.manifest["world_version"]
            self.assertIsInstance(
                world_version,
                str,
                f"world_version in archipelago.json for '{self.game}' has to be string if provided.",
            )
            parts = world_version.split(".")
            self.assertEqual(
                len(parts),
                3,
                f"world_version in archipelago.json for '{self.game}' has to be in the form of 'major.minor.build'.",
            )
            for part in parts:
                self.assertTrue(
                    part.isdigit(),
                    f"world_version in archipelago.json for '{self.game}' may only contain numbers.",
                )

    def test_no_container_version(self) -> None:
        self.assertNotIn(
            "version",
            self.manifest,
            f"archipelago.json for '{self.game}' must not define 'version', see apworld specification.md.",
        )
        self.assertNotIn(
            "compatible_version",
            self.manifest,
            f"archipelago.json for '{self.game}' must not define 'compatible_version', see apworld specification.md.",
        )
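Editor's note: an example archipelago.json that would satisfy the checks above. The values are illustrative only; per the tests, "game" must equal the World class's game attribute, "world_version" is an optional "major.minor.build" string of digits, and the container-level "version"/"compatible_version" keys must not appear in the source manifest.

{
    "game": "My Example Game",
    "world_version": "1.0.0"
}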
@@ -3,6 +3,7 @@
 # Run with `python test/hosting` instead,
 import logging
 import traceback
+from pathlib import Path
 from tempfile import TemporaryDirectory
 from time import sleep
 from typing import Any

@@ -11,7 +12,7 @@ from test.hosting.client import Client
 from test.hosting.generate import generate_local
 from test.hosting.serve import ServeGame, LocalServeGame, WebHostServeGame
 from test.hosting.webhost import (create_room, get_app, get_multidata_for_room, set_multidata_for_room, start_room,
-                                  stop_autohost, upload_multidata)
+                                  stop_autogen, stop_autohost, upload_multidata, generate_remote)
 from test.hosting.world import copy as copy_world, delete as delete_world

 failure = False

@@ -56,35 +57,62 @@ else:


 if __name__ == "__main__":
+    import sys
     import warnings

     warnings.simplefilter("ignore", ResourceWarning)
     warnings.simplefilter("ignore", UserWarning)
+    warnings.simplefilter("ignore", DeprecationWarning)

     spacer = '=' * 80

     with TemporaryDirectory() as tempdir:
+        empty_file = str(Path(tempdir) / "empty")
+        open(empty_file, "w").close()
+        sys.argv += ["--config_override", empty_file]  # tests #5541
         multis = [["VVVVVV"], ["Temp World"], ["VVVVVV", "Temp World"]]
-        p1_games = []
-        data_paths = []
-        rooms = []
+        p1_games: list[str] = []
+        data_paths: list[Path | None] = []
+        rooms: list[str] = []
+        multidata: Path | None

         copy_world("VVVVVV", "Temp World")
         try:
             for n, games in enumerate(multis, 1):
-                print(f"Generating [{n}] {', '.join(games)}")
+                print(f"Generating [{n}] {', '.join(games)} offline")
                 multidata = generate_local(games, tempdir)
                 print(f"Generated [{n}] {', '.join(games)} as {multidata}\n")
-                p1_games.append(games[0])
                 data_paths.append(multidata)
+                p1_games.append(games[0])
         finally:
             delete_world("Temp World")

         webapp = get_app(tempdir)
         webhost_client = webapp.test_client()

         for n, multidata in enumerate(data_paths, 1):
+            assert multidata
             seed = upload_multidata(webhost_client, multidata)
+            print(f"Uploaded [{n}] {multidata} as {seed}\n")
             room = create_room(webhost_client, seed)
-            print(f"Uploaded [{n}] {multidata} as {room}\n")
+            print(f"Started [{n}] {seed} as {room}\n")
+            rooms.append(room)
+
+        # Generate 1 extra game on WebHost
+        from WebHostLib.autolauncher import autogen
+        for n, games in enumerate(multis[:1], len(multis) + 1):
+            multis.append(games)
+            try:
+                print(f"Generating [{n}] {', '.join(games)} online")
+                autogen(webapp.config)
+                sleep(5)  # until we have lazy loading of worlds, wait here for the process to start up
+                seed = generate_remote(webhost_client, games)
+                print(f"Generated [{n}] {', '.join(games)} as {seed}\n")
+            finally:
+                stop_autogen()
+            data_paths.append(None)  # WebHost-only
+            room = create_room(webhost_client, seed)
+            print(f"Started [{n}] {seed} as {room}\n")
             rooms.append(room)

         print("Starting autohost")

@@ -96,31 +124,10 @@ if __name__ == "__main__":
         for n, (multidata, room, game, multi_games) in enumerate(zip(data_paths, rooms, p1_games, multis), 1):
             involved_games = {"Archipelago"} | set(multi_games)
             for collected_items in range(3):
-                print(f"\nTesting [{n}] {game} in {multidata} on MultiServer with {collected_items} items collected")
-                with LocalServeGame(multidata) as host:
-                    with Client(host.address, game, "Player1") as client:
-                        local_data_packages = client.games_packages
-                        local_collected_items = len(client.checked_locations)
-                        if collected_items < 2:  # Don't collect anything on the last iteration
-                            client.collect_any()
-                        # TODO: Ctrl+C test here as well
-
-                for game_name in sorted(involved_games):
-                    expect_true(game_name in local_data_packages,
-                                f"{game_name} missing from MultiServer datap ackage")
-                    expect_true("item_name_groups" not in local_data_packages.get(game_name, {}),
-                                f"item_name_groups are not supposed to be in MultiServer data for {game_name}")
-                    expect_true("location_name_groups" not in local_data_packages.get(game_name, {}),
-                                f"location_name_groups are not supposed to be in MultiServer data for {game_name}")
-                for game_name in local_data_packages:
-                    expect_true(game_name in involved_games,
-                                f"Received unexpected extra data package for {game_name} from MultiServer")
-                assert_equal(local_collected_items, collected_items,
-                             "MultiServer did not load or save correctly")
-
                 print(f"\nTesting [{n}] {game} in {multidata} on customserver with {collected_items} items collected")
                 prev_host_adr: str
                 with WebHostServeGame(webhost_client, room) as host:
+                    sleep(.1)  # wait for the server to fully start before doing anything
                     prev_host_adr = host.address
                     with Client(host.address, game, "Player1") as client:
                         web_data_packages = client.games_packages

@@ -134,6 +141,7 @@ if __name__ == "__main__":
                 autohost(webapp.config)  # this will spin the room right up again
                 sleep(1)  # make log less annoying
                 # if saving failed, the next iteration will fail below
+                sleep(2)  # work around issue #5571

                 # verify server shut down
                 try:

@@ -156,6 +164,31 @@ if __name__ == "__main__":
                              "customserver did not load or save correctly during/after "
                              + ("Ctrl+C" if collected_items == 2 else "/exit"))

+                if not multidata:
+                    continue  # games rolled on WebHost can not be tested against MultiServer
+
+                print(f"\nTesting [{n}] {game} in {multidata} on MultiServer with {collected_items} items collected")
+                with LocalServeGame(multidata) as host:
+                    with Client(host.address, game, "Player1") as client:
+                        local_data_packages = client.games_packages
+                        local_collected_items = len(client.checked_locations)
+                        if collected_items < 2:  # Don't collect anything on the last iteration
+                            client.collect_any()
+                        # TODO: Ctrl+C test here as well
+
+                for game_name in sorted(involved_games):
+                    expect_true(game_name in local_data_packages,
+                                f"{game_name} missing from MultiServer datapackage")
+                    expect_true("item_name_groups" not in local_data_packages.get(game_name, {}),
+                                f"item_name_groups are not supposed to be in MultiServer data for {game_name}")
+                    expect_true("location_name_groups" not in local_data_packages.get(game_name, {}),
+                                f"location_name_groups are not supposed to be in MultiServer data for {game_name}")
+                for game_name in local_data_packages:
+                    expect_true(game_name in involved_games,
+                                f"Received unexpected extra data package for {game_name} from MultiServer")
+                assert_equal(local_collected_items, collected_items,
+                             "MultiServer did not load or save correctly")
+
                 # compare customserver to MultiServer
                 expect_equal(local_data_packages, web_data_packages,
                              "customserver datapackage differs from MultiServer")

@@ -176,10 +209,12 @@ if __name__ == "__main__":
             print(f"Restoring multidata for {room}")
             set_multidata_for_room(webhost_client, room, old_data)
             with WebHostServeGame(webhost_client, room) as host:
+                sleep(.1)  # wait for the server to fully start before doing anything
                 with Client(host.address, game, "Player1") as client:
                     assert_equal(len(client.checked_locations), 2,
                                  "Save was destroyed during exception in customserver")
                     print("Save file is not busted 🥳")
+            sleep(2)  # work around issue #5571

     finally:
         print("Stopping autohost")
@@ -1,6 +1,10 @@
|
|||||||
|
import io
|
||||||
|
import json
|
||||||
import re
|
import re
|
||||||
|
import time
|
||||||
|
import zipfile
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import TYPE_CHECKING, Optional, cast
|
from typing import TYPE_CHECKING, Iterable, Optional, cast
|
||||||
|
|
||||||
from WebHostLib import to_python
|
from WebHostLib import to_python
|
||||||
|
|
||||||
@@ -10,6 +14,7 @@ if TYPE_CHECKING:
|
|||||||
|
|
||||||
__all__ = [
|
__all__ = [
|
||||||
"get_app",
|
"get_app",
|
||||||
|
"generate_remote",
|
||||||
"upload_multidata",
|
"upload_multidata",
|
||||||
"create_room",
|
"create_room",
|
||||||
"start_room",
|
"start_room",
|
||||||
@@ -17,6 +22,7 @@ __all__ = [
|
|||||||
"set_room_timeout",
|
"set_room_timeout",
|
||||||
"get_multidata_for_room",
|
"get_multidata_for_room",
|
||||||
"set_multidata_for_room",
|
"set_multidata_for_room",
|
||||||
|
"stop_autogen",
|
||||||
"stop_autohost",
|
"stop_autohost",
|
||||||
]
|
]
|
||||||
|
|
||||||
@@ -33,10 +39,43 @@ def get_app(tempdir: str) -> "Flask":
|
|||||||
"TESTING": True,
|
"TESTING": True,
|
||||||
"HOST_ADDRESS": "localhost",
|
"HOST_ADDRESS": "localhost",
|
||||||
"HOSTERS": 1,
|
"HOSTERS": 1,
|
||||||
|
"GENERATORS": 1,
|
||||||
|
"JOB_THRESHOLD": 1,
|
||||||
})
|
})
|
||||||
return get_app()
|
return get_app()
|
||||||
|
|
||||||
|
|
||||||
|
def generate_remote(app_client: "FlaskClient", games: Iterable[str]) -> str:
|
||||||
|
data = io.BytesIO()
|
||||||
|
with zipfile.ZipFile(data, "a", zipfile.ZIP_DEFLATED, False) as zip_file:
|
||||||
|
for n, game in enumerate(games, 1):
|
||||||
|
name = f"{n}.yaml"
|
||||||
|
zip_file.writestr(name, json.dumps({
|
||||||
|
"name": f"Player{n}",
|
||||||
|
"game": game,
|
||||||
|
game: {},
|
||||||
|
"description": f"generate_remote slot {n} ('Player{n}'): {game}",
|
||||||
|
}))
|
||||||
|
data.seek(0)
|
||||||
|
response = app_client.post("/generate", content_type="multipart/form-data", data={
|
||||||
|
"file": (data, "yamls.zip"),
|
||||||
|
})
|
||||||
|
assert response.status_code < 400, f"Starting gen failed: status {response.status_code}"
|
||||||
|
assert "Location" in response.headers, f"Starting gen failed: no redirect"
|
||||||
|
location = response.headers["Location"]
|
||||||
|
assert isinstance(location, str)
|
||||||
|
assert location.startswith("/wait/"), f"Starting WebHost gen failed: unexpected redirect to {location}"
|
||||||
|
for attempt in range(10):
|
||||||
|
response = app_client.get(location)
|
||||||
|
if "Location" in response.headers:
|
||||||
|
location = response.headers["Location"]
|
||||||
|
assert isinstance(location, str)
|
||||||
|
assert location.startswith("/seed/"), f"Finishing WebHost gen failed: unexpected redirect to {location}"
|
||||||
|
return location[6:]
|
||||||
|
time.sleep(1)
|
||||||
|
raise TimeoutError("WebHost gen did not finish")
|
||||||
|
|
||||||
|
|
||||||
def upload_multidata(app_client: "FlaskClient", multidata: Path) -> str:
|
def upload_multidata(app_client: "FlaskClient", multidata: Path) -> str:
|
||||||
response = app_client.post("/uploads", data={
|
response = app_client.post("/uploads", data={
|
||||||
"file": multidata.open("rb"),
|
"file": multidata.open("rb"),
|
||||||
@@ -188,7 +227,7 @@ def set_multidata_for_room(webhost_client: "FlaskClient", room_id: str, data: by
|
|||||||
room.seed.multidata = data
|
room.seed.multidata = data
|
||||||
|
|
||||||
|
|
||||||
def stop_autohost(graceful: bool = True) -> None:
|
def _stop_webhost_mp(name_filter: str, graceful: bool = True) -> None:
|
||||||
import os
|
import os
|
||||||
import signal
|
import signal
|
||||||
|
|
||||||
@@ -198,13 +237,30 @@ def stop_autohost(graceful: bool = True) -> None:

     stop()
     proc: multiprocessing.process.BaseProcess
-    for proc in filter(lambda child: child.name.startswith("MultiHoster"), multiprocessing.active_children()):
+    for proc in filter(lambda child: child.name.startswith(name_filter), multiprocessing.active_children()):
+        # FIXME: graceful currently does not work on Windows because the signals are not properly emulated
+        # and ungraceful may not save the game
+        if proc.pid == os.getpid():
+            continue
         if graceful and proc.pid:
             os.kill(proc.pid, getattr(signal, "CTRL_C_EVENT", signal.SIGINT))
         else:
             proc.kill()
         try:
-            proc.join(30)
+            try:
+                proc.join(30)
+            except TimeoutError:
+                raise
+            except KeyboardInterrupt:
+                # on Windows, the MP exception may be forwarded to the host, so ignore once and retry
+                proc.join(30)
         except TimeoutError:
             proc.kill()
             proc.join()
+
+def stop_autogen(graceful: bool = True) -> None:
+    # FIXME: this name filter is jank, but there seems to be no way to add a custom prefix for a Pool
+    _stop_webhost_mp("SpawnPoolWorker-", graceful)
+
+def stop_autohost(graceful: bool = True) -> None:
+    _stop_webhost_mp("MultiHoster", graceful)
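_stop_webhost_mp generalizes the old stop_autohost: it filters multiprocessing.active_children() by process-name prefix, interrupts each child (Ctrl-C style when graceful), and escalates to kill() if a 30-second join times out. The cross-platform interrupt it builds on can be shown in isolation; this is a sketch, and the interrupt() helper name is made up:

import os
import signal


def interrupt(pid: int) -> None:
    # signal.CTRL_C_EVENT only exists on Windows, where it reaches processes
    # sharing the console; everywhere else getattr() falls back to SIGINT.
    os.kill(pid, getattr(signal, "CTRL_C_EVENT", signal.SIGINT))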
@@ -11,7 +11,7 @@ _new_worlds: dict[str, str] = {}


 def copy(src: str, dst: str) -> None:
     from Utils import get_file_safe_name
-    from worlds import AutoWorldRegister
+    from worlds.AutoWorld import AutoWorldRegister

     assert dst not in _new_worlds, "World already created"
     if '"' in dst or "\\" in dst:  # easier to reject than to escape
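This hunk only corrects the AutoWorldRegister import path; the surrounding context keeps the guard that rejects quote and backslash characters in a destination world name rather than escaping them. The same idea as a standalone, hypothetical helper (not part of the diff):

def check_world_name(name: str) -> str:
    # Rejecting is simpler than escaping: these characters would otherwise need
    # escaping wherever the name is embedded inside a quoted string.
    if '"' in name or "\\" in name:
        raise ValueError(f"unsupported character in world name: {name!r}")
    return name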
14 test/utils/test_daemon_thread_pool.py Normal file
@@ -0,0 +1,14 @@
import unittest

from Utils import DaemonThreadPoolExecutor


class DaemonThreadPoolExecutorTest(unittest.TestCase):
    def test_is_daemon(self) -> None:
        def run() -> None:
            pass

        with DaemonThreadPoolExecutor(1) as executor:
            executor.submit(run)

        self.assertTrue(next(iter(executor._threads)).daemon)
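The new test asserts that workers spawned by Utils.DaemonThreadPoolExecutor are daemon threads, i.e. they cannot keep the interpreter alive at shutdown. That property can be illustrated with plain threading (this is not the Utils implementation, just the behaviour being tested):

import threading
import time


def worker() -> None:
    time.sleep(60)


t = threading.Thread(target=worker, daemon=True)
t.start()
# A daemon thread is abandoned when the main thread exits, so this script ends
# immediately; with daemon=False the interpreter would wait out the full sleep.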
78 test/webhost/test_markdown.py Normal file
@@ -0,0 +1,78 @@
import os
import unittest
from tempfile import NamedTemporaryFile

from mistune import HTMLRenderer, Markdown

from WebHostLib.markdown import ImgUrlRewriteInlineParser, render_markdown


class ImgUrlRewriteTest(unittest.TestCase):
    markdown: Markdown
    base_url = "/static/generated/docs/some_game"

    def setUp(self) -> None:
        self.markdown = Markdown(
            renderer=HTMLRenderer(escape=False),
            inline=ImgUrlRewriteInlineParser(self.base_url),
        )

    def test_relative_img_rewrite(self) -> None:
        html = self.markdown("![Image](image.png)")
        self.assertIn(f'src="{self.base_url}/image.png"', html)

    def test_absolute_img_no_rewrite(self) -> None:
        html = self.markdown("![Image](/image.png)")
        self.assertIn(f'src="/image.png"', html)
        self.assertNotIn(self.base_url, html)

    def test_remote_img_no_rewrite(self) -> None:
        html = self.markdown("![Image](https://example.com/image.png)")
        self.assertIn(f'src="https://example.com/image.png"', html)
        self.assertNotIn(self.base_url, html)

    def test_relative_link_no_rewrite(self) -> None:
        # The parser is only supposed to update images, not links.
        html = self.markdown("[Link](image.png)")
        self.assertIn(f'href="image.png"', html)
        self.assertNotIn(self.base_url, html)

    def test_absolute_link_no_rewrite(self) -> None:
        html = self.markdown("[Link](/image.png)")
        self.assertIn(f'href="/image.png"', html)
        self.assertNotIn(self.base_url, html)

    def test_auto_link_no_rewrite(self) -> None:
        html = self.markdown("<https://example.com/image.png>")
        self.assertIn(f'href="https://example.com/image.png"', html)
        self.assertNotIn(self.base_url, html)

    def test_relative_img_to_other_game(self) -> None:
        html = self.markdown("![Image](../Archipelago/image.png)")
        self.assertIn(f'src="{self.base_url}/../Archipelago/image.png"', html)


class RenderMarkdownTest(unittest.TestCase):
    """Tests that render_markdown does the right thing."""
    base_url = "/static/generated/docs/some_game"

    def test_relative_img_rewrite(self) -> None:
        f = NamedTemporaryFile(delete=False)
        try:
            f.write("![Image](image.png)".encode("utf-8"))
            f.close()
            html = render_markdown(f.name, self.base_url)
            self.assertIn(f'src="{self.base_url}/image.png"', html)
        finally:
            os.unlink(f.name)

    def test_no_img_rewrite(self) -> None:
        f = NamedTemporaryFile(delete=False)
        try:
            f.write("![Image](image.png)".encode("utf-8"))
            f.close()
            html = render_markdown(f.name)
            self.assertIn(f'src="image.png"', html)
            self.assertNotIn(self.base_url, html)
        finally:
            os.unlink(f.name)
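test_markdown.py pins down the rewrite rule: only relative image URLs get the per-game base_url prefix, while absolute paths, remote URLs, and every kind of link (including autolinks) pass through untouched. The rule the assertions encode, written out as a standalone helper for clarity (illustrative; the real ImgUrlRewriteInlineParser hooks into mistune's inline parsing instead):

def rewrite_img_url(url: str, base_url: str) -> str:
    # Leave absolute paths and remote URLs alone; everything else is treated as
    # relative to the generated docs folder of the current game.
    if url.startswith(("/", "http://", "https://")):
        return url
    return f"{base_url}/{url}"


assert rewrite_img_url("image.png", "/static/generated/docs/some_game") \
    == "/static/generated/docs/some_game/image.png"
assert rewrite_img_url("/image.png", "/static/generated/docs/some_game") == "/image.png"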
63 test/webhost/test_sitemap.py Normal file
@@ -0,0 +1,63 @@
import urllib.parse
import html
import re
from flask import url_for

import WebHost
from . import TestBase


class TestSitemap(TestBase):

    # Codes for OK and some redirects that we use
    valid_status_codes = [200, 302, 308]

    @classmethod
    def setUpClass(cls) -> None:
        super().setUpClass()
        WebHost.copy_tutorials_files_to_static()

    def test_sitemap_route(self) -> None:
        """Verify that the sitemap route works correctly and renders the template without errors."""
        with self.app.test_request_context():
            # Test the /sitemap route
            with self.client.open("/sitemap") as response:
                self.assertEqual(response.status_code, 200)
                self.assertIn(b"Site Map", response.data)

            # Test the /index route which should also serve the sitemap
            with self.client.open("/index") as response:
                self.assertEqual(response.status_code, 200)
                self.assertIn(b"Site Map", response.data)

            # Test using url_for with the function name
            with self.client.open(url_for('get_sitemap')) as response:
                self.assertEqual(response.status_code, 200)
                self.assertIn(b'Site Map', response.data)

    def test_sitemap_links(self) -> None:
        """
        Verify that all links in the sitemap are valid by making a request to each one.
        """
        with self.app.test_request_context():
            with self.client.open(url_for("get_sitemap")) as response:
                self.assertEqual(response.status_code, 200)
                html_content = response.data.decode()

            # Extract all href links using regex
            href_pattern = re.compile(r'href=["\'](.*?)["\']')
            links = href_pattern.findall(html_content)

            self.assertTrue(len(links) > 0, "No links found in sitemap")

            # Test each link
            for link in links:
                # Skip external links
                if link.startswith(("http://", "https://")):
                    continue

                link = urllib.parse.unquote(html.unescape(link))

                with self.client.open(link) as response, self.subTest(link=link):
                    self.assertIn(response.status_code, self.valid_status_codes,
                                  f"Link {link} returned invalid status code {response.status_code}")
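test_sitemap_links scrapes every href out of the rendered sitemap with a small regex, HTML-unescapes and URL-unquotes it, then requests each internal link and accepts 200/302/308. The extraction step in isolation, with an inline self-check on sample markup (values are illustrative):

import re

href_pattern = re.compile(r'href=["\'](.*?)["\']')
sample = '<a href="/games">Games</a> <a href=\'/tutorial/\'>Guides</a>'
# Non-greedy capture handles both quote styles and stops at the first closing quote.
assert href_pattern.findall(sample) == ["/games", "/tutorial/"]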
@@ -93,3 +93,13 @@ class TestTracker(TestBase):
             headers={"If-Modified-Since": "Wed, 21 Oct 2015 07:28:00"},  # missing timezone
         )
         self.assertEqual(response.status_code, 400)
+
+    def test_tracker_api(self) -> None:
+        """Verify that tracker api gives a reply for the room."""
+        with self.app.test_request_context():
+            with self.client.open(url_for("api.tracker_data", tracker=self.tracker_uuid)) as response:
+                self.assertEqual(response.status_code, 200)
+            with self.client.open(url_for("api.static_tracker_data", tracker=self.tracker_uuid)) as response:
+                self.assertEqual(response.status_code, 200)
+            with self.client.open(url_for("api.tracker_slot_data", tracker=self.tracker_uuid)) as response:
+                self.assertEqual(response.status_code, 200)
@@ -1,17 +1,46 @@
-def load_tests(loader, standard_tests, pattern):
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from unittest import TestLoader, TestSuite
+
+
+def load_tests(loader: "TestLoader", standard_tests: "TestSuite", pattern: str):
     import os
     import unittest
+    import fnmatch
     from .. import file_path
     from worlds.AutoWorld import AutoWorldRegister

     suite = unittest.TestSuite()
     suite.addTests(standard_tests)

+    # pattern hack
+    # all tests from within __init__ are always imported, so we need to filter out the folder earlier
+    # if the pattern isn't matching a specific world, we don't have much of a solution
+
+    if pattern.startswith("worlds."):
+        if pattern.endswith(".py"):
+            pattern = pattern[:-3]
+        components = pattern.split(".")
+        world_glob = f"worlds.{components[1]}"
+        pattern = components[-1]
+
+    elif pattern.startswith(f"worlds{os.path.sep}") or pattern.startswith(f"worlds{os.path.altsep}"):
+        components = pattern.split(os.path.sep)
+        if len(components) == 1:
+            components = pattern.split(os.path.altsep)
+        world_glob = f"worlds.{components[1]}"
+        pattern = components[-1]
+    else:
+        world_glob = "*"
+
     folders = [os.path.join(os.path.split(world.__file__)[0], "test")
-               for world in AutoWorldRegister.world_types.values()]
+               for world in AutoWorldRegister.world_types.values()
+               if fnmatch.fnmatch(world.__module__, world_glob)]

     all_tests = [
         test_case for folder in folders if os.path.exists(folder)
-        for test_collection in loader.discover(folder, top_level_dir=file_path)
+        for test_collection in loader.discover(folder, top_level_dir=file_path, pattern=pattern)
         for test_suite in test_collection if isinstance(test_suite, unittest.suite.TestSuite)
         for test_case in test_suite
     ]
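The "pattern hack" above narrows world-test discovery to a single world: when the requested pattern starts with "worlds." (or a worlds/ path), the second component selects the world package via fnmatch against world.__module__, and only the final component is passed on to loader.discover() as its file pattern. A worked example of the dotted branch (the world and file names are illustrative):

pattern = "worlds.alttp.test_dungeons.py"
if pattern.endswith(".py"):
    pattern = pattern[:-3]
components = pattern.split(".")          # ["worlds", "alttp", "test_dungeons"]
world_glob = f"worlds.{components[1]}"   # "worlds.alttp" -- matched against world.__module__
pattern = components[-1]                 # "test_dungeons" -- handed to loader.discover()
assert (world_glob, pattern) == ("worlds.alttp", "test_dungeons")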
Some files were not shown because too many files have changed in this diff.