3 Commits

Author SHA1 Message Date
54edc4dab7 chore: finish BME JSONs extractor 2025-07-24 14:51:05 +08:00
f40efb0467 chore: finish icons builder
- finish icons builder in scripts.
2025-07-24 14:00:25 +08:00
10de948a79 feat: update scripts
- add meshes builder (copyer).
- fix json compressor.
2025-07-24 10:16:58 +08:00
11 changed files with 367 additions and 350 deletions

View File

@ -1,4 +1,4 @@
# Tools
# Scripts
These tool scripts are served for BBP_NG specifically. We use Astral UV with a single Python project file to manage these tools. You can browse their usage in this file.
@ -14,10 +14,18 @@ Compress BME prototype JSON files into smaller size.
Execute `uv run build_json.py`
## Build Meshes
Copy Ballance element placeholder into Blender plugin.
Execute `uv run build_meshes.py`
## Validate BME Prototype
Validate the correctness of BME prototype JSON files.
Validation is VERY crucial. Before running anything involving BME prototype JSONs, please validate them first.
Execute `uv run validate_json.py`
## Extract BME Translation

View File

@ -1,3 +1,77 @@
import enum
from typing import Optional, Self
from typing import Optional
from pydantic import BaseModel, RootModel, Field, model_validator, ValidationError
class ShowcaseType(enum.StrEnum):
    # Kind of showcase preview a BME prototype declares.
    Nothing = 'none'
    Floor = 'floor'
    # NOTE(review): value is capitalized 'Rail' unlike the other lowercase
    # values — the same capitalization exists in the legacy validator, so it
    # presumably matches the JSON data; confirm before normalizing.
    Rail = 'Rail'
    Wood = 'wood'
class ShowcaseCfgType(enum.StrEnum):
    # Data type of a single showcase configuration entry.
    Float = 'float'
    Int = 'int'
    Bool = 'bool'
    Face = 'face'
class ShowcaseCfg(BaseModel):
    # One configurable entry of a prototype showcase.
    field: str = Field(frozen=True, strict=True)
    type: ShowcaseCfgType = Field(frozen=True)
    title: str = Field(frozen=True, strict=True)  # translatable (extracted into the POT)
    desc: str = Field(frozen=True, strict=True)   # translatable (extracted into the POT)
    # Stored as a string; in the legacy validator this had to parse as Python
    # source (a "programmable field"), so presumably an expression — TODO confirm.
    default: str = Field(frozen=True, strict=True)
class Showcase(BaseModel):
    # Display metadata of a prototype.
    title: str = Field(frozen=True, strict=True)  # translatable (extracted into the POT)
    # Icon name without extension; resolved to <raw assets>/icons/bme/<name>.png
    # by the validator.
    icon: str = Field(frozen=True, strict=True)
    type: ShowcaseType = Field(frozen=True)
    cfgs: list[ShowcaseCfg] = Field(frozen=True, strict=True)
class Param(BaseModel):
    # A named parameter of a prototype; `data` holds its value as a string.
    field: str = Field(frozen=True, strict=True)
    data: str = Field(frozen=True, strict=True)
class Var(BaseModel):
    # A named internal variable of a prototype; `data` holds its value as a string.
    field: str = Field(frozen=True, strict=True)
    data: str = Field(frozen=True, strict=True)
class Vertex(BaseModel):
    # One vertex of a prototype mesh.
    # `skip` and `data` were programmable (Python-parseable) fields in the
    # legacy validator — presumably a skip-condition and position expression.
    skip: str = Field(frozen=True, strict=True)
    data: str = Field(frozen=True, strict=True)
class Face(BaseModel):
    # One polygon face of a prototype mesh.
    skip: str = Field(frozen=True, strict=True)
    texture: str = Field(frozen=True, strict=True)
    indices: list[int] = Field(frozen=True, strict=True)
    # The legacy validator required uvs (and normals, when present) to have
    # the same length as `indices`; that constraint is not enforced here.
    uvs: list[str] = Field(frozen=True, strict=True)
    normals: Optional[list[str]] = Field(frozen=True, strict=True)
class Instance(BaseModel):
    # A reference to another prototype (by `identifier`) placed inside this one.
    identifier: str = Field(frozen=True, strict=True)
    skip: str = Field(frozen=True, strict=True)
    params: dict[str, str] = Field(frozen=True, strict=True)  # param name -> value string
    transform: str = Field(frozen=True, strict=True)
class Prototype(BaseModel):
    # A single BME prototype: identity, optional showcase metadata, and mesh data.
    # `identifier` must be unique across all JSON files (checked by validate_jsons).
    identifier: str = Field(frozen=True, strict=True)
    showcase: Optional[Showcase] = Field(frozen=True, strict=True)
    params: list[Param] = Field(frozen=True, strict=True)
    skip: str = Field(frozen=True, strict=True)
    vars: list[Var] = Field(frozen=True, strict=True)
    vertices: list[Vertex] = Field(frozen=True, strict=True)
    faces: list[Face] = Field(frozen=True, strict=True)
    instances: list[Instance] = Field(frozen=True, strict=True)
class Prototypes(RootModel):
    # Top-level document model: a prototype JSON file is simply a list of prototypes.
    root: list[Prototype] = Field(frozen=True, strict=True)

View File

@ -1,51 +1,53 @@
import logging
import logging, os
from pathlib import Path
import common
from common import AssetKind
import PIL, PIL.Image
# the config for thumbnail
# The HW size of thumbnail
THUMBNAIL_SIZE: int = 16
class ThumbnailBuilder():
def __init__(self):
pass
def _create_thumbnail(src_file: Path, dst_file: Path) -> None:
# open image
src_image: PIL.Image.Image = PIL.Image.open(src_file)
# create thumbnail
src_image.thumbnail((THUMBNAIL_SIZE, THUMBNAIL_SIZE))
# save to new file
src_image.save(dst_file)
def build_thumbnails(self) -> None:
# get folder path
root_folder = common.get_plugin_folder()
# prepare handler
def folder_handler(rel_name: str, src_folder: Path, dst_folder: Path) -> None:
# just create folder
logging.info(f'Creating Folder: {src_folder} -> {dst_folder}')
dst_folder.mkdir(parents=False, exist_ok=True)
def file_handler(rel_name: str, src_file: Path, dst_file: Path) -> None:
# skip non-image
if src_file.suffix != '.png': return
# call thumbnail func
logging.info(f'Building Thumbnail: {src_file} -> {dst_file}')
self.__resize_image(src_file, dst_file)
def build_icons() -> None:
raw_icons_dir = common.get_raw_assets_folder(AssetKind.Icons)
plg_icons_dir = common.get_plugin_assets_folder(AssetKind.Icons)
# call common processor
common.common_file_migrator(
root_folder / 'raw_icons',
root_folder / 'icons',
folder_handler,
file_handler
)
# TODO: If we have Python 3.12, use Path.walk instead of current polyfill.
logging.info('Building thumbnail done.')
# Icon assets has subdirectory, so we need use another way to process.
for root, dirs, files in os.walk(raw_icons_dir):
root = Path(root)
# Iterate folders
for name in dirs:
# Fetch directory path
raw_icon_subdir = root / name
plg_icon_subdir = plg_icons_dir / raw_icon_subdir.relative_to(raw_icons_dir)
# Show message
logging.info(f'Creating Folder: {raw_icon_subdir} -> {plg_icon_subdir}')
# Create directory
plg_icon_subdir.mkdir(parents=True, exist_ok=True)
# Iterate files
for name in files:
# Fetch file path
raw_icon_file = root / name
plg_icon_file = plg_icons_dir / raw_icon_file.relative_to(raw_icons_dir)
# Show message
logging.info(f'Building Thumbnail: {raw_icon_file} -> {plg_icon_file}')
# Create thumbnail
_create_thumbnail(raw_icon_file, plg_icon_file)
def __resize_image(self, src_file: Path, dst_file: Path) -> None:
# open image
src_image: PIL.Image.Image = PIL.Image.open(src_file)
# create thumbnail
src_image.thumbnail((THUMBNAIL_SIZE, THUMBNAIL_SIZE))
# save to new file
src_image.save(dst_file)
if __name__ == '__main__':
common.setup_logging()
thumbnail_builder = ThumbnailBuilder()
thumbnail_builder.build_thumbnails()
build_icons()

View File

@ -1,84 +1,44 @@
import os, json, typing
import bme_utils, bme_relatives, simple_po
import json, logging
from pathlib import Path
import common
from common import AssetKind
class JsonCompressor():
__mReporter: bme_utils.Reporter
__mPoWriter: simple_po.PoWriter
# __mValidator: bme_relatives.BMEValidator
__mExtractor: bme_relatives.BMEExtractor
def _compress_json(src_file: Path, dst_file: Path) -> None:
# load data first
with open(src_file, 'r', encoding='utf-8') as f:
loaded_prototypes = json.load(f)
def __init__(self):
self.__mReporter = bme_utils.Reporter()
self.__mPoWriter = simple_po.PoWriter(
os.path.join(common.get_plugin_folder(), 'i18n', 'bme.pot'),
'BME Prototypes'
)
# self.__mValidator = bme_relatives.BMEValidator(self.__mReporter)
self.__mExtractor = bme_relatives.BMEExtractor(self.__mReporter, self.__mPoWriter)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.close()
def close(self) -> None:
self.__mPoWriter.close()
def run(self) -> None:
self.__compress_jsons()
def __compress_jsons(self) -> None:
# get folder path
root_folder: str = common.get_plugin_folder()
# prepare handler
def folder_handler(rel_name: str, src_folder: str, dst_folder: str) -> None:
# just create folder
self.__mReporter.info(f'Creating Folder: {src_folder} -> {dst_folder}')
os.makedirs(dst_folder, exist_ok = True)
def file_handler(rel_name: str, src_file: str, dst_file: str) -> None:
# skip non-json
if not src_file.endswith('.json'): return
# call compress func
self.__mReporter.info(f'Processing JSON: {src_file} -> {dst_file}')
self.__compress_json(rel_name, src_file, dst_file)
# call common processor
common.common_file_migrator(
os.path.join(root_folder, 'raw_jsons'),
os.path.join(root_folder, 'jsons'),
folder_handler,
file_handler
# save result with compress config
with open(dst_file, 'w', encoding='utf-8') as f:
json.dump(
loaded_prototypes, # loaded data
f,
indent=None, # no indent. the most narrow style.
separators=(',', ':'), # also for narrow style.
sort_keys=False, # do not sort key
)
self.__mReporter.info('Building JSON done.')
def __compress_json(self, rel_name: str, src_file: str, dst_file: str) -> None:
# load data first
loaded_prototypes: typing.Any
with open(src_file, 'r', encoding = 'utf-8') as fr:
loaded_prototypes = json.load(fr)
def build_jsons() -> None:
raw_jsons_dir = common.get_raw_assets_folder(AssetKind.Jsons)
plg_jsons_dir = common.get_plugin_assets_folder(AssetKind.Jsons)
# validate loaded data
# self.__mValidator.validate(rel_name, loaded_prototypes)
for raw_json_file in raw_jsons_dir.glob('*.json'):
# Skip non-file.
if not raw_json_file.is_file():
continue
# extract translation
self.__mExtractor.extract(rel_name, loaded_prototypes)
# Build final path
plg_json_file = plg_jsons_dir / raw_json_file.relative_to(raw_jsons_dir)
# save result
with open(dst_file, 'w', encoding = 'utf-8') as fw:
json.dump(
loaded_prototypes, # loaded data
fw,
indent = None, # no indent. the most narrow style.
separators = (',', ':'), # also for narrow style.
sort_keys = False, # do not sort key
)
# Show message
logging.info(f'Compressing {raw_json_file} -> {plg_json_file}')
# Compress json
_compress_json(raw_json_file, plg_json_file)
if __name__ == '__main__':
with JsonCompressor() as json_compressor:
json_compressor.run()
common.setup_logging()
build_jsons()

27
scripts/build_meshes.py Normal file
View File

@ -0,0 +1,27 @@
import shutil, logging
import common
from common import AssetKind
def build_meshes() -> None:
    """Copy Ballance element placeholder files (*.ph) from raw assets into the plugin.

    Iterates the raw meshes asset folder and mirrors every ``.ph`` file into
    the plugin meshes asset folder, logging each copy.
    """
    raw_meshes_dir = common.get_raw_assets_folder(AssetKind.Meshes)
    plg_meshes_dir = common.get_plugin_assets_folder(AssetKind.Meshes)

    # Make sure the destination folder exists before copying into it;
    # shutil.copyfile fails if the parent directory is missing.
    plg_meshes_dir.mkdir(parents=True, exist_ok=True)

    for raw_ph_file in raw_meshes_dir.glob('*.ph'):
        # Skip anything that is not a regular file.
        if not raw_ph_file.is_file():
            continue
        # Build final path mirroring the source layout.
        plg_ph_file = plg_meshes_dir / raw_ph_file.relative_to(raw_meshes_dir)
        # Show message
        logging.info(f'Copying {raw_ph_file} -> {plg_ph_file}')
        # Copy placeholder
        shutil.copyfile(raw_ph_file, plg_ph_file)
# Script entry point: configure logging, then run the mesh copier.
if __name__ == '__main__':
    common.setup_logging()
    build_meshes()

View File

@ -1,76 +1,39 @@
import os, typing, logging
import os, typing, logging, enum
from pathlib import Path
def get_plugin_folder() -> Path:
"""
Get the absolute path to plugin root folder.
:return: The absolute path to plugin root folder.
def get_root_folder() -> Path:
"""
Get the path to the root folder of this repository.
:return: The absolute path to the root folder of this repository.
"""
return Path(__file__).resolve().parent.parent
def relative_to_folder(abs_path: Path, src_parent: Path, dst_parent: Path) -> Path:
class AssetKind(enum.StrEnum):
Icons = 'icons'
Jsons = 'jsons'
Meshes = 'meshes'
def get_raw_assets_folder(kind: AssetKind) -> Path:
"""
Rebase one path to another path.
Get the path to the raw assets folder of given kind.
Give a absolute file path and folder path, and compute the relative path of given file to given folder.
Then applied the computed relative path to another given folder path.
Thus it seems like the file was rebased to from a folder to another folder with keeping the folder hierarchy.
For example, given `/path/to/file` and `/path`, it will compute relative path `to/file`.
Then it was applied to another folder path `/new` and got `/new/to/file`.
:param abs_path: The absolute path to a folder or file.
:param src_parent: The absolute path to folder which the `abs_path` will have relative path to.
:param dst_parent: The absolute path to folder which the relative path will be applied to.
:return: The absolute path to the raw assets folder of given kind.
"""
return dst_parent / (abs_path.relative_to(src_parent))
return get_root_folder() / 'assets' / str(kind)
def common_file_migrator(
from_folder: Path, to_folder: Path,
fct_proc_folder: typing.Callable[[str, Path, Path], None],
fct_proc_file: typing.Callable[[str, Path, Path], None]) -> None:
def get_plugin_assets_folder(kind: AssetKind) -> Path:
"""
Common file migrator used by some build script.
Get the path to the plugin assets folder of given kind.
This function receive 2 absolute folder path. `from_folder` indicate the file migrated out,
and `to_folder` indicate the file migrated in.
`fct_proc_folder` is a function pointer from caller which handle folder migration in detail.
`fct_proc_file` is same but handle file migration.
`fct_proc_folder` will receive 3 args.
First is the name of this folder which can be shown for end user.
Second is the source folder and third is expected dest folder.
`fct_proc_file` is same, but receive the file path instead.
Both of these function pointer should do the migration in detail. This function will only just iterate
folder and give essential args and will not do any migration operations such as copying or moving.
:param from_folder: The folder need to be migrated.
:param to_folder: The folder will be migrated to.
:param fct_proc_folder: Folder migration detail handler.
:param fct_proc_file: File migration detail handler.
:return: The absolute path to the plugin assets folder of given kind.
"""
# TODO: If we have Python 3.12, use Path.walk instead of current polyfill.
return get_root_folder() / 'bbp_ng' / str(kind)
# iterate from_folder folder
for root, dirs, files in os.walk(from_folder, topdown=True):
root = Path(root)
# iterate folders
for name in dirs:
# prepare handler args
src_folder = root / name
dst_folder = relative_to_folder(src_folder, from_folder, to_folder)
# call handler
fct_proc_folder(name, src_folder, dst_folder)
# iterate files
for name in files:
# prepare handler args
src_file = root / name
dst_file = relative_to_folder(src_file, from_folder, to_folder)
# call handler
fct_proc_file(name, src_file, dst_file)
def setup_logging() -> None:
"""

View File

@ -1,6 +0,0 @@
import common
if __name__ == '__main__':
common.setup_logging()

88
scripts/extract_jsons.py Normal file
View File

@ -0,0 +1,88 @@
import json, logging, typing, itertools
from pathlib import Path
import common, bme
from common import AssetKind
import pydantic, polib
## YYC MARK:
# This translation context string prefix is copied from UTIL_translation.py.
# If the context string of translation changed, please synchronize it.
CTX_TRANSLATION: str = 'BBP/BME'
def _extract_prototype(prototype: bme.Prototype) -> typing.Iterator[polib.POEntry]:
    """Yield a PO entry for every translatable string of one prototype.

    Prototypes without a showcase contribute nothing. The showcase title uses
    the context ``<prefix>/<identifier>``; each cfg's title and description
    share the context ``<prefix>/<identifier>/[<index>]``.
    """
    logging.info(f'Extracting prototype {prototype.identifier}')

    showcase = prototype.showcase
    if showcase is None:
        return

    # Showcase title.
    ctx_base = f'{CTX_TRANSLATION}/{prototype.identifier}'
    yield polib.POEntry(msgid=showcase.title, msgstr='', msgctxt=ctx_base)

    # Title and description of every configuration entry.
    for index, cfg in enumerate(showcase.cfgs):
        ctx_cfg = f'{ctx_base}/[{index}]'
        yield polib.POEntry(msgid=cfg.title, msgstr='', msgctxt=ctx_cfg)
        yield polib.POEntry(msgid=cfg.desc, msgstr='', msgctxt=ctx_cfg)
def _extract_json(json_file: Path) -> typing.Iterator[polib.POEntry]:
    """Extract PO entries from a single BME prototype JSON file.

    On JSON or schema errors the problem is logged and an empty iterator is
    returned instead of raising, so one bad file does not stop the extraction.
    """
    logging.info(f'Extracting file {json_file}')
    try:
        # Read the file and convert it into the BME document model.
        with open(json_file, 'r', encoding='utf-8') as f:
            prototypes = bme.Prototypes.model_validate(json.load(f))
        # Lazily concatenate the entries of every prototype.
        return itertools.chain.from_iterable(map(_extract_prototype, prototypes.root))
    except json.JSONDecodeError:
        logging.error(f'Can not extract translation from {json_file} due to JSON error. Please validate it first.')
    except pydantic.ValidationError:
        logging.error(f'Can not extract translation from {json_file} due to struct error. Please validate it first.')
    # Error path: output nothing.
    return iter(())
def extract_jsons() -> None:
    """Collect translatable strings from all raw BME JSONs into ``i18n/bme.pot``."""
    raw_jsons_dir = common.get_raw_assets_folder(AssetKind.Jsons)
    # Create POT content with standard placeholder metadata.
    po = polib.POFile()
    po.metadata = {
        'Project-Id-Version': '1.0',
        'Report-Msgid-Bugs-To': 'you@example.com',
        'POT-Creation-Date': 'YEAR-MO-DA HO:MI+ZONE',
        'PO-Revision-Date': 'YEAR-MO-DA HO:MI+ZONE',
        'Last-Translator': 'FULL NAME <EMAIL@ADDRESS>',
        'Language-Team': 'LANGUAGE <LL@li.org>',
        'MIME-Version': '1.0',
        'Content-Type': 'text/plain; charset=utf-8',
        'Content-Transfer-Encoding': '8bit',
        'X-Generator': 'polib',
    }
    # Iterate all prototype files and add their entries into the POT.
    for raw_json_file in raw_jsons_dir.glob('*.json'):
        # Skip non-file.
        if not raw_json_file.is_file():
            continue
        # Extract json and append it.
        po.extend(_extract_json(raw_json_file))
    # Write into POT file, creating the output folder first in case it is
    # missing (po.save does not create parent directories).
    pot_file = common.get_root_folder() / 'i18n' / 'bme.pot'
    pot_file.parent.mkdir(parents=True, exist_ok=True)
    logging.info(f'Saving POT into {pot_file}')
    po.save(str(pot_file))
# Script entry point: configure logging, then run the translation extractor.
if __name__ == '__main__':
    common.setup_logging()
    extract_jsons()

2
scripts/uv.lock generated
View File

@ -131,7 +131,7 @@ wheels = [
]
[[package]]
name = "tools"
name = "scripts"
version = "1.0.0"
source = { virtual = "." }
dependencies = [

View File

@ -1,175 +0,0 @@
import enum
import json
import logging
import ast
from typing import Optional, Self
from pydantic import BaseModel, RootModel, Field, model_validator, ValidationError
import common
def validate_programmable_str(probe: str) -> None:
    """Raise ``ValueError`` when the given string cannot be parsed as Python source.

    Used by the pydantic validators below to check "programmable" fields,
    whose values are later evaluated as Python code.
    """
    try:
        ast.parse(probe)
    except SyntaxError:
        message = f'String {probe} may not be a valid Python statement which is not suit for programmable field.'
        raise ValueError(message)
class ShowcaseType(enum.StrEnum):
    # Kind of showcase preview a BME prototype declares.
    Nothing = 'none'
    Floor = 'floor'
    # NOTE(review): value is capitalized 'Rail' unlike the other lowercase
    # values — presumably matches the JSON data; confirm before normalizing.
    Rail = 'Rail'
    Wood = 'wood'
class ShowcaseCfgType(enum.StrEnum):
    # Data type of a single showcase configuration entry.
    Float = 'float'
    Int = 'int'
    Bool = 'bool'
    Face = 'face'
class ShowcaseCfg(BaseModel):
    # One configurable entry of a prototype showcase.
    field: str = Field(frozen=True, strict=True)
    type: ShowcaseCfgType = Field(frozen=True)
    title: str = Field(frozen=True, strict=True)
    desc: str = Field(frozen=True, strict=True)
    default: str = Field(frozen=True, strict=True)

    @model_validator(mode='after')
    def verify_prog_field(self) -> Self:
        # `default` is a programmable field: it must parse as Python source.
        validate_programmable_str(self.default)
        return self
class Showcase(BaseModel):
    # Display metadata of a prototype.
    title: str = Field(frozen=True, strict=True)
    icon: str = Field(frozen=True, strict=True)
    type: ShowcaseType = Field(frozen=True)
    cfgs: list[ShowcaseCfg] = Field(frozen=True, strict=True)
class Param(BaseModel):
    # A named parameter of a prototype; `data` is a programmable value string.
    field: str = Field(frozen=True, strict=True)
    data: str = Field(frozen=True, strict=True)

    @model_validator(mode='after')
    def verify_prog_field(self) -> Self:
        # `data` must parse as Python source.
        validate_programmable_str(self.data)
        return self
class Var(BaseModel):
    # A named internal variable of a prototype; `data` is a programmable value string.
    field: str = Field(frozen=True, strict=True)
    data: str = Field(frozen=True, strict=True)

    @model_validator(mode='after')
    def verify_prog_field(self) -> Self:
        # `data` must parse as Python source.
        validate_programmable_str(self.data)
        return self
class Vertex(BaseModel):
    # One vertex of a prototype mesh; both fields are programmable strings.
    skip: str = Field(frozen=True, strict=True)
    data: str = Field(frozen=True, strict=True)

    @model_validator(mode='after')
    def verify_prog_field(self) -> Self:
        # Both fields must parse as Python source.
        validate_programmable_str(self.skip)
        validate_programmable_str(self.data)
        return self
class Face(BaseModel):
    # One polygon face of a prototype mesh.
    skip: str = Field(frozen=True, strict=True)
    texture: str = Field(frozen=True, strict=True)
    indices: list[int] = Field(frozen=True, strict=True)
    uvs: list[str] = Field(frozen=True, strict=True)
    normals: Optional[list[str]] = Field(frozen=True, strict=True)

    @model_validator(mode='after')
    def verify_count(self) -> Self:
        # uvs (and normals, when present) must parallel `indices` one-to-one.
        expected_count = len(self.indices)
        if len(self.uvs) != expected_count:
            raise ValueError('The length of uv array is not matched with indices.')
        if (self.normals is not None) and (len(self.normals) != expected_count):
            raise ValueError('The length of normal array is not matched with indices.')
        return self

    @model_validator(mode='after')
    def verify_prog_field(self) -> Self:
        # All string fields here are programmable: each must parse as Python source.
        validate_programmable_str(self.skip)
        validate_programmable_str(self.texture)
        for i in self.uvs:
            validate_programmable_str(i)
        if self.normals is not None:
            for i in self.normals:
                validate_programmable_str(i)
        return self
class Instance(BaseModel):
    # A reference to another prototype (by `identifier`) placed inside this one.
    identifier: str = Field(frozen=True, strict=True)
    skip: str = Field(frozen=True, strict=True)
    params: dict[str, str] = Field(frozen=True, strict=True)
    transform: str = Field(frozen=True, strict=True)

    @model_validator(mode='after')
    def verify_prog_field(self) -> Self:
        # skip, every param value, and transform are programmable strings.
        validate_programmable_str(self.skip)
        for v in self.params.values():
            validate_programmable_str(v)
        validate_programmable_str(self.transform)
        return self
# Process-wide registry of identifiers seen so far, used by
# Prototype.verify_identifier to reject duplicates across every file
# validated in this interpreter session.
IDENTIFIERS: set[str] = set()

class Prototype(BaseModel):
    # A single BME prototype: identity, optional showcase metadata, and mesh data.
    identifier: str = Field(frozen=True, strict=True)
    showcase: Optional[Showcase] = Field(frozen=True, strict=True)
    params: list[Param] = Field(frozen=True, strict=True)
    skip: str = Field(frozen=True, strict=True)
    vars: list[Var] = Field(frozen=True, strict=True)
    vertices: list[Vertex] = Field(frozen=True, strict=True)
    faces: list[Face] = Field(frozen=True, strict=True)
    instances: list[Instance] = Field(frozen=True, strict=True)

    @model_validator(mode='after')
    def verify_identifier(self) -> Self:
        # Uniqueness check relying on the module-level IDENTIFIERS set above.
        global IDENTIFIERS
        if self.identifier in IDENTIFIERS:
            raise ValueError(f'Identifier {self.identifier} is already registered.')
        else:
            IDENTIFIERS.add(self.identifier)
        return self

    @model_validator(mode='after')
    def verify_prog_field(self) -> Self:
        # `skip` is a programmable field: it must parse as Python source.
        validate_programmable_str(self.skip)
        return self
class Prototypes(RootModel):
    # Top-level document model: a prototype JSON file is simply a list of prototypes.
    root: list[Prototype] = Field(frozen=True, strict=True)
def validate_json() -> None:
    # Validate every raw BME prototype JSON (recursively) under raw_jsons:
    # first as plain JSON, then against the pydantic schema above.
    # Errors are logged rather than raised, so one bad file does not stop the scan.
    raw_json_folder = common.get_plugin_folder() / 'raw_jsons'
    for json_file in raw_json_folder.rglob('*.json'):
        logging.info(f'Validating {json_file} ...')
        try:
            with open(json_file, 'r', encoding='utf-8') as f:
                docuement = json.load(f)
            Prototypes.model_validate(docuement)
        except json.JSONDecodeError as e:
            logging.error(f'Can not load file {json_file}. It may not a valid JSON file. Reason: {e}')
        except ValidationError as e:
            logging.error(f'File {json_file} is not correct. Reason: {e}')
# Script entry point: configure logging, then run the validator.
if __name__ == '__main__':
    common.setup_logging()
    validate_json()

76
scripts/validate_jsons.py Normal file
View File

@ -0,0 +1,76 @@
import json, logging, ast, typing
import common, bme
from common import AssetKind
import pydantic
#region Assistant Validator
def _validate_programmable_field(probe: str) -> None:
try:
ast.parse(probe)
except SyntaxError:
logging.error(f'String {probe} may not be a valid Python statement which is not suit for programmable field.')
def _validate_showcase_icon(icon_name: str) -> None:
    """Log an error when a showcase icon has no backing PNG in raw assets.

    Icons referenced by showcases are expected at
    ``<raw assets>/icons/bme/<icon_name>.png``.
    """
    icon_path = common.get_raw_assets_folder(AssetKind.Icons) / 'bme' / f'{icon_name}.png'
    if not icon_path.is_file():
        # Fixed the grammar of the original message ("because it do not existing").
        logging.error(f'Icon value {icon_name} may not be valid because it does not exist.')
#endregion
#region Core Validator
def _validate_prototype(prototype: bme.Prototype) -> None:
    # TODO: per-prototype custom validation is not implemented yet.
    # Presumably this should apply _validate_programmable_field and
    # _validate_showcase_icon to the prototype's fields — confirm intent
    # with the author's notes in this file.
    pass
#endregion
# Fold the requirements of the JSON translation extraction into validation:
# - Showcase::Cfgs::Title and Desc must not be empty.
# - Showcase::Cfgs::Title and Showcase::Cfgs::Desc must not be duplicated.
def validate_jsons() -> None:
    """Validate all raw BME prototype JSON files, logging every problem found.

    Three passes: (1) load each file and check it against the pydantic schema,
    (2) check identifier uniqueness across all files, (3) run custom
    per-prototype validation.
    """
    raw_jsons_dir = common.get_raw_assets_folder(AssetKind.Jsons)

    # Load all prototypes and check their basic format.
    prototypes: list[bme.Prototype] = []
    for raw_json_file in raw_jsons_dir.glob('*.json'):
        # Skip non-file.
        if not raw_json_file.is_file():
            continue
        # Show info
        logging.info(f'Loading {raw_json_file}')
        # Load prototypes.
        try:
            with open(raw_json_file, 'r', encoding='utf-8') as f:
                document = json.load(f)
            file_prototypes = bme.Prototypes.model_validate(document)
        except json.JSONDecodeError as e:
            logging.error(f'File {raw_json_file} is not a valid JSON file. Reason: {e}')
            # Fix: the original fell through here and then read `file_prototypes`,
            # which is unbound on the first failure and stale on later ones.
            continue
        except pydantic.ValidationError as e:
            logging.error(f'JSON file {raw_json_file} lose essential fields. Detail: {e}')
            continue
        # Append all prototypes into list.
        prototypes += file_prototypes.root

    # Collect identifiers and check their uniqueness.
    identifiers: set[str] = set()
    for prototype in prototypes:
        identifier = prototype.identifier
        if prototype.identifier in identifiers:
            logging.error(f'Identifier {identifier} is registered more than once.')
        else:
            identifiers.add(identifier)

    # Start custom validation.
    # Fix: the original iterated `protype` (typo) while validating `prototype`,
    # so it re-validated the leftover variable from the previous loop each time.
    for prototype in prototypes:
        _validate_prototype(prototype)
# Script entry point: configure logging, then run the validator.
if __name__ == '__main__':
    common.setup_logging()
    validate_jsons()