diff --git a/assets/jsons/1x1.json b/assets/jsons/1x1.json5
similarity index 100%
rename from assets/jsons/1x1.json
rename to assets/jsons/1x1.json5
diff --git a/assets/jsons/borders.json b/assets/jsons/borders.json5
similarity index 100%
rename from assets/jsons/borders.json
rename to assets/jsons/borders.json5
diff --git a/assets/jsons/chris_vanilla.json b/assets/jsons/chris_vanilla.json5
similarity index 100%
rename from assets/jsons/chris_vanilla.json
rename to assets/jsons/chris_vanilla.json5
diff --git a/assets/jsons/corners.json b/assets/jsons/corners.json5
similarity index 100%
rename from assets/jsons/corners.json
rename to assets/jsons/corners.json5
diff --git a/assets/jsons/crossings.json b/assets/jsons/crossings.json5
similarity index 100%
rename from assets/jsons/crossings.json
rename to assets/jsons/crossings.json5
diff --git a/assets/jsons/flat.json b/assets/jsons/flat.json5
similarity index 100%
rename from assets/jsons/flat.json
rename to assets/jsons/flat.json5
diff --git a/assets/jsons/platforms.json b/assets/jsons/platforms.json5
similarity index 100%
rename from assets/jsons/platforms.json
rename to assets/jsons/platforms.json5
diff --git a/assets/jsons/sides.json b/assets/jsons/sides.json5
similarity index 100%
rename from assets/jsons/sides.json
rename to assets/jsons/sides.json5
diff --git a/assets/jsons/streets.json b/assets/jsons/streets.json5
similarity index 100%
rename from assets/jsons/streets.json
rename to assets/jsons/streets.json5
diff --git a/assets/jsons/terminals.json b/assets/jsons/terminals.json5
similarity index 100%
rename from assets/jsons/terminals.json
rename to assets/jsons/terminals.json5
diff --git a/assets/jsons/trafos.json b/assets/jsons/trafos.json5
similarity index 100%
rename from assets/jsons/trafos.json
rename to assets/jsons/trafos.json5
diff --git a/assets/jsons/transitions.json b/assets/jsons/transitions.json5
similarity index 100%
rename from assets/jsons/transitions.json
rename to assets/jsons/transitions.json5
diff --git a/assets/jsons/wide_floors.json b/assets/jsons/wide_floors.json5
similarity index 100%
rename from assets/jsons/wide_floors.json
rename to assets/jsons/wide_floors.json5
diff --git a/scripts/build_jsons.py b/scripts/build_jsons.py
index 3e2e83f..e01255c 100644
--- a/scripts/build_jsons.py
+++ b/scripts/build_jsons.py
@@ -2,12 +2,13 @@
 import json, logging
 from pathlib import Path
 import common
 from common import AssetKind
+import json5


 def _compress_json(src_file: Path, dst_file: Path) -> None:
     # load data first
     with open(src_file, 'r', encoding='utf-8') as f:
-        loaded_prototypes = json.load(f)
+        loaded_prototypes = json5.load(f)

     # save result with compress config
     with open(dst_file, 'w', encoding='utf-8') as f:
@@ -24,13 +25,14 @@
 def build_jsons() -> None:
     raw_jsons_dir = common.get_raw_assets_folder(AssetKind.Jsons)
     plg_jsons_dir = common.get_plugin_assets_folder(AssetKind.Jsons)

-    for raw_json_file in raw_jsons_dir.glob('*.json'):
+    for raw_json_file in raw_jsons_dir.glob('*.json5'):
         # Skip non-file.
         if not raw_json_file.is_file(): continue

         # Build final path
         plg_json_file = plg_jsons_dir / raw_json_file.relative_to(raw_jsons_dir)
+        plg_json_file = plg_json_file.with_suffix('.json')

         # Show message
         logging.info(f'Compressing {raw_json_file} -> {plg_json_file}')
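Note on the two build_jsons.py hunks above: the raw assets are now authored as *.json5, but the compressed copies written to the plugin folder keep the plain .json extension, so the build step swaps the suffix while deriving the output path. A minimal sketch of that mapping, using hypothetical folders in place of the values returned by common.get_raw_assets_folder() and common.get_plugin_assets_folder():

    from pathlib import Path

    # Hypothetical directories; the real ones come from common.get_*_assets_folder().
    raw_jsons_dir = Path('assets/jsons')
    plg_jsons_dir = Path('plugin/jsons')

    raw_json_file = raw_jsons_dir / '1x1.json5'
    plg_json_file = plg_jsons_dir / raw_json_file.relative_to(raw_jsons_dir)
    plg_json_file = plg_json_file.with_suffix('.json')
    print(plg_json_file)  # plugin/jsons/1x1.json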
diff --git a/scripts/common.py b/scripts/common.py
index 2f2ccd7..598a92b 100644
--- a/scripts/common.py
+++ b/scripts/common.py
@@ -1,4 +1,4 @@
-import os, typing, logging, enum
+import logging, enum, typing
 from pathlib import Path
diff --git a/scripts/extract_jsons.py b/scripts/extract_jsons.py
index a2332d3..30ed626 100644
--- a/scripts/extract_jsons.py
+++ b/scripts/extract_jsons.py
@@ -1,8 +1,8 @@
-import json, logging, typing, itertools
+import logging, typing, itertools
 from pathlib import Path
 import common, bme
 from common import AssetKind
-import pydantic, polib
+import pydantic, polib, json5

 ## YYC MARK:
 # This translation context string prefix is cpoied from UTIL_translation.py.
@@ -38,14 +38,14 @@
 def _extract_json(json_file: Path) -> typing.Iterator[polib.POEntry]:
     try:
         # Read file and convert it into BME struct.
         with open(json_file, 'r', encoding='utf-8') as f:
-            document = json.load(f)
+            document = json5.load(f)
         prototypes = bme.Prototypes.model_validate(document)
         # Extract translation
         return itertools.chain.from_iterable(_extract_prototype(prototype) for prototype in prototypes.root)
-    except json.JSONDecodeError:
-        logging.error(f'Can not extract translation from {json_file} due to JSON error. Please validate it first.')
     except pydantic.ValidationError:
         logging.error(f'Can not extract translation from {json_file} due to struct error. Please validate it first.')
+    except (ValueError, UnicodeDecodeError):
+        logging.error(f'Can not extract translation from {json_file} due to JSON5 error. Please validate it first.')

     # Output nothing
     return itertools.chain.from_iterable(())
@@ -70,7 +70,7 @@
     }

     # Iterate all prototypes and add into POT
-    for raw_json_file in raw_jsons_dir.glob('*.json'):
+    for raw_json_file in raw_jsons_dir.glob('*.json5'):
         # Skip non-file.
         if not raw_json_file.is_file(): continue
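The replacement except clause in _extract_json follows from how the json5 package reports failures: malformed input surfaces as a plain ValueError rather than json.JSONDecodeError, and undecodable bytes as UnicodeDecodeError. A small sketch of the failure mode, with a deliberately malformed snippet:

    import json5

    try:
        json5.loads('{ key: "value", ')  # unterminated object, parse fails
    except (ValueError, UnicodeDecodeError) as e:
        print(f'JSON5 error: {e}')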
diff --git a/scripts/pyproject.toml b/scripts/pyproject.toml
index 64b7255..95e7487 100644
--- a/scripts/pyproject.toml
+++ b/scripts/pyproject.toml
@@ -3,6 +3,7 @@
 name = "scripts"
 version = "1.0.0"
 requires-python = ">=3.11"
 dependencies = [
+    "json5>=0.12.0",
     "pillow==10.2.0",
     "polib>=1.2.0",
     "pydantic>=2.11.7",
diff --git a/scripts/uv.lock b/scripts/uv.lock
index 5115a27..7d71075 100644
--- a/scripts/uv.lock
+++ b/scripts/uv.lock
@@ -11,6 +11,15 @@
 wheels = [
     { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" },
 ]

+[[package]]
+name = "json5"
+version = "0.12.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/12/be/c6c745ec4c4539b25a278b70e29793f10382947df0d9efba2fa09120895d/json5-0.12.0.tar.gz", hash = "sha256:0b4b6ff56801a1c7dc817b0241bca4ce474a0e6a163bfef3fc594d3fd263ff3a", size = 51907, upload-time = "2025-04-03T16:33:13.201Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/41/9f/3500910d5a98549e3098807493851eeef2b89cdd3032227558a104dfe926/json5-0.12.0-py3-none-any.whl", hash = "sha256:6d37aa6c08b0609f16e1ec5ff94697e2cbbfbad5ac112afa05794da9ab7810db", size = 36079, upload-time = "2025-04-03T16:33:11.927Z" },
+]
+
 [[package]]
 name = "pillow"
 version = "10.2.0"
@@ -135,6 +144,7 @@
 name = "scripts"
 version = "1.0.0"
 source = { virtual = "." }
 dependencies = [
+    { name = "json5" },
     { name = "pillow" },
     { name = "polib" },
     { name = "pydantic" },
 ]
@@ -142,6 +152,7 @@
 [package.metadata]
 requires-dist = [
+    { name = "json5", specifier = ">=0.12.0" },
     { name = "pillow", specifier = "==10.2.0" },
     { name = "polib", specifier = ">=1.2.0" },
     { name = "pydantic", specifier = ">=2.11.7" },
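For context on why the extra dependency is worth pinning: JSON5 lets the prototype files carry comments, unquoted keys, and trailing commas that strict json.load rejects, which is the point of the *.json to *.json5 renames above. A tiny illustration; the snippet is invented and not taken from the real asset files:

    import json5

    # Invented prototype fragment, only to show the JSON5 extensions in use.
    snippet = '''{
        // comments, unquoted keys and trailing commas are legal JSON5
        name: "example_prototype",
        size: [1, 1],
    }'''
    print(json5.loads(snippet))  # {'name': 'example_prototype', 'size': [1, 1]}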
diff --git a/scripts/validate_jsons.py b/scripts/validate_jsons.py
index 9023396..3c8e81f 100644
--- a/scripts/validate_jsons.py
+++ b/scripts/validate_jsons.py
@@ -1,18 +1,10 @@
-import json, logging, ast, typing
+import logging, ast, typing
 import common, bme
 from common import AssetKind
-import pydantic
+import pydantic, json5

 #region Assistant Checker

-# TODO:
-# If possible, following check should be done.
-# They are not done now because they are so complex to implement.
-# - The reference to variables and functions in programmable fields.
-# - The return type of prorgammable fields.
-# - Texture name referred in the programmable field in Face.
-# - In instance, passed params to instance is fulfilled.
-
 def _try_add(entries: set[str], entry: str) -> bool:
     if entry in entries:
@@ -23,9 +15,6 @@
 def _check_programmable_field(probe: str) -> None:
-    # TODO:
-    # If possible, allow checking the reference to variables and function,
-    # to make sure the statement must can be executed.
     try:
         ast.parse(probe)
     except SyntaxError:
@@ -178,7 +167,7 @@
     # Load all prototypes and check their basic format
     prototypes: list[bme.Prototype] = []
-    for raw_json_file in raw_jsons_dir.glob('*.json'):
+    for raw_json_file in raw_jsons_dir.glob('*.json5'):
         # Skip non-file
         if not raw_json_file.is_file(): continue
@@ -189,12 +178,12 @@
         # Load prototypes
         try:
             with open(raw_json_file, 'r', encoding='utf-8') as f:
-                docuement = json.load(f)
+                docuement = json5.load(f)
             file_prototypes = bme.Prototypes.model_validate(docuement)
-        except json.JSONDecodeError as e:
-            logging.error(f'File {raw_json_file} is not a valid JSON file. Reason: {e}')
         except pydantic.ValidationError as e:
             logging.error(f'JSON file {raw_json_file} lose essential fields. Detail: {e}')
+        except (ValueError, UnicodeDecodeError) as e:
+            logging.error(f'File {raw_json_file} is not a valid JSON5 file. Reason: {e}')

         # Append all prototypes into list
         prototypes += file_prototypes.root
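One subtlety in both validate_jsons.py and extract_jsons.py: pydantic.ValidationError itself derives from ValueError, so the pydantic clause must stay ahead of the new (ValueError, UnicodeDecodeError) clause, otherwise struct errors would be reported as JSON5 parse errors. A sketch of why the order matters; Prototypes below is a stand-in for bme.Prototypes, not the real model:

    import json5, pydantic

    class Prototypes(pydantic.RootModel[list[dict]]):
        """Stand-in root model, just to demonstrate the except order."""

    try:
        # Valid JSON5, but the wrong shape for the model.
        Prototypes.model_validate(json5.loads('{ foo: "bar" }'))
    except pydantic.ValidationError:
        print('struct error')   # reached: the more specific clause is listed first
    except (ValueError, UnicodeDecodeError):
        print('JSON5 error')    # would swallow ValidationError if listed first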