refactor: use JSON5 instead of JSON for BME prototype.
- Use JSON5 instead of JSON for the BME prototype description files, so that comments can be added to the declaration files (TBD in the future).
- Upgrade the corresponding scripts.
- Confirm that the migration of the scripts to modern Python is complete.
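
For illustration, this is roughly the kind of commented declaration that becomes possible once the source files are JSON5. The field names below are hypothetical and not taken from the real BME prototype schema; only the comment and trailing-comma syntax is the point, loaded here with the json5 package added in this commit:

    import json5

    # Hypothetical declaration content; real BME prototypes use a different schema.
    sample = '''
    [
        {
            // Comments like this are now legal in the declaration file.
            "name": "example_prototype",
            "params": [],   // JSON5 also tolerates trailing commas
        },
    ]
    '''

    prototypes = json5.loads(sample)
    print(prototypes[0]['name'])   # -> example_prototype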
@@ -2,12 +2,13 @@ import json, logging
 from pathlib import Path
 import common
 from common import AssetKind
+import json5
 
 
 def _compress_json(src_file: Path, dst_file: Path) -> None:
     # load data first
     with open(src_file, 'r', encoding='utf-8') as f:
-        loaded_prototypes = json.load(f)
+        loaded_prototypes = json5.load(f)
 
     # save result with compress config
     with open(dst_file, 'w', encoding='utf-8') as f:
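
As a rough sketch of what the compression step above now does end to end (the json.dump options shown are assumptions, since the diff does not include the actual compress config):

    import json, json5
    from pathlib import Path

    def _compress_json_sketch(src_file: Path, dst_file: Path) -> None:
        # Read the human-edited JSON5 source (comments and trailing commas allowed).
        with open(src_file, 'r', encoding='utf-8') as f:
            loaded_prototypes = json5.load(f)
        # Emit plain minified JSON for the plugin to consume.
        # separators/ensure_ascii are guesses; the real options are not shown in this hunk.
        with open(dst_file, 'w', encoding='utf-8') as f:
            json.dump(loaded_prototypes, f, separators=(',', ':'), ensure_ascii=False)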
@@ -24,13 +25,14 @@ def build_jsons() -> None:
     raw_jsons_dir = common.get_raw_assets_folder(AssetKind.Jsons)
     plg_jsons_dir = common.get_plugin_assets_folder(AssetKind.Jsons)
 
-    for raw_json_file in raw_jsons_dir.glob('*.json'):
+    for raw_json_file in raw_jsons_dir.glob('*.json5'):
         # Skip non-file.
         if not raw_json_file.is_file():
             continue
 
         # Build final path
         plg_json_file = plg_jsons_dir / raw_json_file.relative_to(raw_jsons_dir)
+        plg_json_file = plg_json_file.with_suffix('.json')
 
         # Show message
         logging.info(f'Compressing {raw_json_file} -> {plg_json_file}')
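
The loop above now globs *.json5 sources but still emits *.json outputs, which is what the added with_suffix call does; a tiny pathlib illustration (the file name is made up):

    from pathlib import Path

    raw_json_file = Path('Sphere.json5')          # hypothetical source name
    print(raw_json_file.with_suffix('.json'))     # -> Sphere.json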
@@ -1,4 +1,4 @@
-import os, typing, logging, enum
+import logging, enum, typing
 from pathlib import Path
 
 
@@ -1,8 +1,8 @@
-import json, logging, typing, itertools
+import logging, typing, itertools
 from pathlib import Path
 import common, bme
 from common import AssetKind
-import pydantic, polib
+import pydantic, polib, json5
 
 ## YYC MARK:
 # This translation context string prefix is cpoied from UTIL_translation.py.
@@ -38,14 +38,14 @@ def _extract_json(json_file: Path) -> typing.Iterator[polib.POEntry]:
     try:
         # Read file and convert it into BME struct.
         with open(json_file, 'r', encoding='utf-8') as f:
-            document = json.load(f)
+            document = json5.load(f)
             prototypes = bme.Prototypes.model_validate(document)
         # Extract translation
         return itertools.chain.from_iterable(_extract_prototype(prototype) for prototype in prototypes.root)
-    except json.JSONDecodeError:
-        logging.error(f'Can not extract translation from {json_file} due to JSON error. Please validate it first.')
     except pydantic.ValidationError:
         logging.error(f'Can not extract translation from {json_file} due to struct error. Please validate it first.')
+    except (ValueError, UnicodeDecodeError):
+        logging.error(f'Can not extract translation from {json_file} due to JSON5 error. Please validate it first.')
 
     # Output nothing
     return itertools.chain.from_iterable(())
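
Note the reworked exception handling above: the json5 package reports malformed input as a plain ValueError rather than json.JSONDecodeError, hence the new (ValueError, UnicodeDecodeError) handler. A minimal check with a deliberately broken snippet:

    import json5

    broken = '{ "name": '   # deliberately malformed JSON5
    try:
        json5.loads(broken)
    except ValueError as e:
        # Parse failures surface as ValueError, which the new handler catches.
        print(f'invalid JSON5: {e}')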
@@ -70,7 +70,7 @@ def extract_jsons() -> None:
     }
 
     # Iterate all prototypes and add into POT
-    for raw_json_file in raw_jsons_dir.glob('*.json'):
+    for raw_json_file in raw_jsons_dir.glob('*.json5'):
         # Skip non-file.
         if not raw_json_file.is_file():
             continue
@@ -3,6 +3,7 @@ name = "scripts"
 version = "1.0.0"
 requires-python = ">=3.11"
 dependencies = [
+    "json5>=0.12.0",
     "pillow==10.2.0",
     "polib>=1.2.0",
     "pydantic>=2.11.7",
scripts/uv.lock (generated)
@@ -11,6 +11,15 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" },
 ]
 
+[[package]]
+name = "json5"
+version = "0.12.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/12/be/c6c745ec4c4539b25a278b70e29793f10382947df0d9efba2fa09120895d/json5-0.12.0.tar.gz", hash = "sha256:0b4b6ff56801a1c7dc817b0241bca4ce474a0e6a163bfef3fc594d3fd263ff3a", size = 51907, upload-time = "2025-04-03T16:33:13.201Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/41/9f/3500910d5a98549e3098807493851eeef2b89cdd3032227558a104dfe926/json5-0.12.0-py3-none-any.whl", hash = "sha256:6d37aa6c08b0609f16e1ec5ff94697e2cbbfbad5ac112afa05794da9ab7810db", size = 36079, upload-time = "2025-04-03T16:33:11.927Z" },
+]
+
 [[package]]
 name = "pillow"
 version = "10.2.0"
@@ -135,6 +144,7 @@ name = "scripts"
 version = "1.0.0"
 source = { virtual = "." }
 dependencies = [
+    { name = "json5" },
     { name = "pillow" },
     { name = "polib" },
     { name = "pydantic" },
@@ -142,6 +152,7 @@ dependencies = [
 
 [package.metadata]
 requires-dist = [
+    { name = "json5", specifier = ">=0.12.0" },
     { name = "pillow", specifier = "==10.2.0" },
     { name = "polib", specifier = ">=1.2.0" },
     { name = "pydantic", specifier = ">=2.11.7" },
@@ -1,18 +1,10 @@
-import json, logging, ast, typing
+import logging, ast, typing
 import common, bme
 from common import AssetKind
-import pydantic
+import pydantic, json5
 
 #region Assistant Checker
 
-# TODO:
-# If possible, following check should be done.
-# They are not done now because they are so complex to implement.
-# - The reference to variables and functions in programmable fields.
-# - The return type of prorgammable fields.
-# - Texture name referred in the programmable field in Face.
-# - In instance, passed params to instance is fulfilled.
-
 
 def _try_add(entries: set[str], entry: str) -> bool:
     if entry in entries:
@@ -23,9 +15,6 @@ def _try_add(entries: set[str], entry: str) -> bool:
 
 
 def _check_programmable_field(probe: str) -> None:
-    # TODO:
-    # If possible, allow checking the reference to variables and function,
-    # to make sure the statement must can be executed.
     try:
         ast.parse(probe)
     except SyntaxError:
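
For context, the syntax check kept above relies only on the standard library: ast.parse raises SyntaxError when a programmable field is not valid Python, e.g.:

    import ast

    try:
        ast.parse('1 +')    # hypothetical programmable-field content
    except SyntaxError as e:
        print(f'invalid programmable field: {e.msg}')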
@@ -178,7 +167,7 @@ def validate_jsons() -> None:
 
     # Load all prototypes and check their basic format
     prototypes: list[bme.Prototype] = []
-    for raw_json_file in raw_jsons_dir.glob('*.json'):
+    for raw_json_file in raw_jsons_dir.glob('*.json5'):
         # Skip non-file
         if not raw_json_file.is_file():
             continue
@@ -189,12 +178,12 @@ def validate_jsons() -> None:
         # Load prototypes
         try:
             with open(raw_json_file, 'r', encoding='utf-8') as f:
-                docuement = json.load(f)
+                docuement = json5.load(f)
                 file_prototypes = bme.Prototypes.model_validate(docuement)
-        except json.JSONDecodeError as e:
-            logging.error(f'File {raw_json_file} is not a valid JSON file. Reason: {e}')
         except pydantic.ValidationError as e:
             logging.error(f'JSON file {raw_json_file} lose essential fields. Detail: {e}')
+        except (ValueError, UnicodeDecodeError) as e:
+            logging.error(f'File {raw_json_file} is not a valid JSON5 file. Reason: {e}')
 
         # Append all prototypes into list
         prototypes += file_prototypes.root