feat: update scripts
- add meshes builder (copier).
- fix JSON compressor.
@@ -1,4 +1,4 @@
-# Tools
+# Scripts
 
 These tool scripts serve BBP_NG specifically. We use Astral UV with a single Python project file to manage these tools. You can browse their usage in this file.
 
@@ -14,6 +14,12 @@ Compress BME prototype JSON files into smaller size.
 
 Execute `uv run build_json.py`
 
+## Build Meshes
+
+Copy Ballance element placeholders into the Blender plugin.
+
+Execute `uv run build_meshes.py`
+
 ## Validate BME Prototype
 
 Validate the correctness of BME prototype JSON files.
@@ -13,7 +13,7 @@ class ThumbnailBuilder():
 
     def build_thumbnails(self) -> None:
         # get folder path
-        root_folder = common.get_plugin_folder()
+        root_folder = common.get_root_folder()
 
         # prepare handler
         def folder_handler(rel_name: str, src_folder: Path, dst_folder: Path) -> None:
@@ -1,84 +1,44 @@
-import os, json, typing
-import bme_utils, bme_relatives, simple_po
+import json, logging
+from pathlib import Path
 import common
+from common import AssetKind
 
-class JsonCompressor():
-
-    __mReporter: bme_utils.Reporter
-    __mPoWriter: simple_po.PoWriter
-    # __mValidator: bme_relatives.BMEValidator
-    __mExtractor: bme_relatives.BMEExtractor
+def _compress_json(src_file: Path, dst_file: Path) -> None:
+    # load data first
+    with open(src_file, 'r', encoding='utf-8') as f:
+        loaded_prototypes = json.load(f)
 
-    def __init__(self):
-        self.__mReporter = bme_utils.Reporter()
-        self.__mPoWriter = simple_po.PoWriter(
-            os.path.join(common.get_plugin_folder(), 'i18n', 'bme.pot'),
-            'BME Prototypes'
-        )
-        # self.__mValidator = bme_relatives.BMEValidator(self.__mReporter)
-        self.__mExtractor = bme_relatives.BMEExtractor(self.__mReporter, self.__mPoWriter)
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, exc_type, exc_value, traceback):
-        self.close()
-
-    def close(self) -> None:
-        self.__mPoWriter.close()
-
-    def run(self) -> None:
-        self.__compress_jsons()
-
-    def __compress_jsons(self) -> None:
-        # get folder path
-        root_folder: str = common.get_plugin_folder()
-
-        # prepare handler
-        def folder_handler(rel_name: str, src_folder: str, dst_folder: str) -> None:
-            # just create folder
-            self.__mReporter.info(f'Creating Folder: {src_folder} -> {dst_folder}')
-            os.makedirs(dst_folder, exist_ok = True)
-        def file_handler(rel_name: str, src_file: str, dst_file: str) -> None:
-            # skip non-json
-            if not src_file.endswith('.json'): return
-            # call compress func
-            self.__mReporter.info(f'Processing JSON: {src_file} -> {dst_file}')
-            self.__compress_json(rel_name, src_file, dst_file)
-
-        # call common processor
-        common.common_file_migrator(
-            os.path.join(root_folder, 'raw_jsons'),
-            os.path.join(root_folder, 'jsons'),
-            folder_handler,
-            file_handler
+    # save result with compress config
+    with open(dst_file, 'w', encoding='utf-8') as f:
+        json.dump(
+            loaded_prototypes, # loaded data
+            f,
+            indent=None, # no indent. the most narrow style.
+            separators=(',', ':'), # also for narrow style.
+            sort_keys=False, # do not sort key
         )
 
-        self.__mReporter.info('Building JSON done.')
-
-    def __compress_json(self, rel_name: str, src_file: str, dst_file: str) -> None:
-        # load data first
-        loaded_prototypes: typing.Any
-        with open(src_file, 'r', encoding = 'utf-8') as fr:
-            loaded_prototypes = json.load(fr)
+def build_jsons() -> None:
+    raw_jsons_dir = common.get_raw_assets_folder(AssetKind.Jsons)
+    plg_jsons_dir = common.get_plugin_assets_folder(AssetKind.Jsons)
 
-        # validate loaded data
-        # self.__mValidator.validate(rel_name, loaded_prototypes)
+    for raw_json_file in raw_jsons_dir.glob('*.json'):
+        # Skip non-file.
+        if not raw_json_file.is_file():
+            continue
 
-        # extract translation
-        self.__mExtractor.extract(rel_name, loaded_prototypes)
+        # Build final path
+        plg_json_file = plg_jsons_dir / raw_json_file.relative_to(raw_jsons_dir)
 
-        # save result
-        with open(dst_file, 'w', encoding = 'utf-8') as fw:
-            json.dump(
-                loaded_prototypes, # loaded data
-                fw,
-                indent = None, # no indent. the most narrow style.
-                separators = (',', ':'), # also for narrow style.
-                sort_keys = False, # do not sort key
-            )
+        # Show message
+        logging.info(f'Compressing {raw_json_file} -> {plg_json_file}')
 
+        # Compress json
+        _compress_json(raw_json_file, plg_json_file)
+
+
 if __name__ == '__main__':
-    with JsonCompressor() as json_compressor:
-        json_compressor.run()
+    common.setup_logging()
+    build_jsons()
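The `indent=None` and `separators=(',', ':')` arguments in `_compress_json` above are what make the output compact. A minimal sketch of their effect, using a purely illustrative dictionary rather than a real BME prototype:

```python
import json

sample = {'type': 'CUBE', 'params': [1, 2, 3]}  # illustrative data, not a real BME prototype

# Default settings keep a space after ',' and ':'
print(json.dumps(sample))
# {"type": "CUBE", "params": [1, 2, 3]}

# The compressor's settings drop all optional whitespace
print(json.dumps(sample, indent=None, separators=(',', ':'), sort_keys=False))
# {"type":"CUBE","params":[1,2,3]}
```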
scripts/build_meshes.py (new file, 27 lines)
@@ -0,0 +1,27 @@
+import shutil, logging
+import common
+from common import AssetKind
+
+
+def build_meshes() -> None:
+    raw_meshes_dir = common.get_raw_assets_folder(AssetKind.Meshes)
+    plg_meshes_dir = common.get_plugin_assets_folder(AssetKind.Meshes)
+
+    for raw_ph_file in raw_meshes_dir.glob('*.ph'):
+        # Skip non-file.
+        if not raw_ph_file.is_file():
+            continue
+
+        # Build final path
+        plg_ph_file = plg_meshes_dir / raw_ph_file.relative_to(raw_meshes_dir)
+
+        # Show message
+        logging.info(f'Copying {raw_ph_file} -> {plg_ph_file}')
+
+        # Copy placeholder
+        shutil.copyfile(raw_ph_file, plg_ph_file)
+
+
+if __name__ == '__main__':
+    common.setup_logging()
+    build_meshes()
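One assumption worth calling out in `build_meshes`: `shutil.copyfile` does not create missing directories, so the plugin meshes folder must already exist in the tree. A defensive variant (hypothetical, not part of this commit) could create it first:

```python
import shutil
from pathlib import Path

def copy_placeholder(raw_ph_file: Path, plg_ph_file: Path) -> None:
    # Hypothetical helper: ensure the destination folder exists before copying.
    plg_ph_file.parent.mkdir(parents=True, exist_ok=True)
    shutil.copyfile(raw_ph_file, plg_ph_file)
```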
@@ -1,76 +1,102 @@
-import os, typing, logging
+import os, typing, logging, enum
 from pathlib import Path
 
-def get_plugin_folder() -> Path:
-    """
-    Get the absolute path to plugin root folder.
-
-    :return: The absolute path to plugin root folder.
+def get_root_folder() -> Path:
+    """
+    Get the path to the root folder of this repository.
+
+    :return: The absolute path to the root folder of this repository.
     """
     return Path(__file__).resolve().parent.parent
 
-def relative_to_folder(abs_path: Path, src_parent: Path, dst_parent: Path) -> Path:
+
+class AssetKind(enum.StrEnum):
+    Icons = 'icons'
+    Jsons = 'jsons'
+    Meshes = 'meshes'
+
+
+def get_raw_assets_folder(kind: AssetKind) -> Path:
     """
-    Rebase one path to another path.
+    Get the path to the raw assets folder of given kind.
 
-    Give a absolute file path and folder path, and compute the relative path of given file to given folder.
-    Then applied the computed relative path to another given folder path.
-    Thus it seems like the file was rebased to from a folder to another folder with keeping the folder hierarchy.
-
-    For example, given `/path/to/file` and `/path`, it will compute relative path `to/file`.
-    Then it was applied to another folder path `/new` and got `/new/to/file`.
-
-    :param abs_path: The absolute path to a folder or file.
-    :param src_parent: The absolute path to folder which the `abs_path` will have relative path to.
-    :param dst_parent: The absolute path to folder which the relative path will be applied to.
+    :return: The absolute path to the raw assets folder of given kind.
     """
-    return dst_parent / (abs_path.relative_to(src_parent))
+    return get_root_folder() / 'assets' / str(kind)
 
-def common_file_migrator(
-        from_folder: Path, to_folder: Path,
-        fct_proc_folder: typing.Callable[[str, Path, Path], None],
-        fct_proc_file: typing.Callable[[str, Path, Path], None]) -> None:
+
+def get_plugin_assets_folder(kind: AssetKind) -> Path:
     """
-    Common file migrator used by some build script.
+    Get the path to the plugin assets folder of given kind.
 
-    This function receive 2 absolute folder path. `from_folder` indicate the file migrated out,
-    and `to_folder` indicate the file migrated in.
-    `fct_proc_folder` is a function pointer from caller which handle folder migration in detail.
-    `fct_proc_file` is same but handle file migration.
-
-    `fct_proc_folder` will receive 3 args.
-    First is the name of this folder which can be shown for end user.
-    Second is the source folder and third is expected dest folder.
-    `fct_proc_file` is same, but receive the file path instead.
-    Both of these function pointer should do the migration in detail. This function will only just iterate
-    folder and give essential args and will not do any migration operations such as copying or moving.
-
-    :param from_folder: The folder need to be migrated.
-    :param to_folder: The folder will be migrated to.
-    :param fct_proc_folder: Folder migration detail handler.
-    :param fct_proc_file: File migration detail handler.
+    :return: The absolute path to the plugin assets folder of given kind.
     """
-    # TODO: If we have Python 3.12, use Path.walk instead of current polyfill.
+    return get_root_folder() / 'bbp_ng' / str(kind)
 
-    # iterate from_folder folder
-    for root, dirs, files in os.walk(from_folder, topdown=True):
-        root = Path(root)
-
-        # iterate folders
-        for name in dirs:
-            # prepare handler args
-            src_folder = root / name
-            dst_folder = relative_to_folder(src_folder, from_folder, to_folder)
-            # call handler
-            fct_proc_folder(name, src_folder, dst_folder)
+# def relative_to_folder(abs_path: Path, src_parent: Path, dst_parent: Path) -> Path:
+    # """
+    # Rebase one path to another path.
+
+    # Give a absolute file path and folder path, and compute the relative path of given file to given folder.
+    # Then applied the computed relative path to another given folder path.
+    # Thus it seems like the file was rebased to from a folder to another folder with keeping the folder hierarchy.
+
+    # For example, given `/path/to/file` and `/path`, it will compute relative path `to/file`.
+    # Then it was applied to another folder path `/new` and got `/new/to/file`.
+
+    # :param abs_path: The absolute path to a folder or file.
+    # :param src_parent: The absolute path to folder which the `abs_path` will have relative path to.
+    # :param dst_parent: The absolute path to folder which the relative path will be applied to.
+    # """
+    # return dst_parent / (abs_path.relative_to(src_parent))
+
+
+# def common_file_migrator(from_folder: Path, to_folder: Path, fct_proc_folder: typing.Callable[[str, Path, Path], None],
+        # fct_proc_file: typing.Callable[[str, Path, Path], None]) -> None:
+    # """
+    # Common file migrator used by some build script.
+
+    # This function receive 2 absolute folder path. `from_folder` indicate the file migrated out,
+    # and `to_folder` indicate the file migrated in.
+    # `fct_proc_folder` is a function pointer from caller which handle folder migration in detail.
+    # `fct_proc_file` is same but handle file migration.
+
+    # `fct_proc_folder` will receive 3 args.
+    # First is the name of this folder which can be shown for end user.
+    # Second is the source folder and third is expected dest folder.
+    # `fct_proc_file` is same, but receive the file path instead.
+    # Both of these function pointer should do the migration in detail. This function will only just iterate
+    # folder and give essential args and will not do any migration operations such as copying or moving.
+
+    # :param from_folder: The folder need to be migrated.
+    # :param to_folder: The folder will be migrated to.
+    # :param fct_proc_folder: Folder migration detail handler.
+    # :param fct_proc_file: File migration detail handler.
+    # """
+    # # TODO: If we have Python 3.12, use Path.walk instead of current polyfill.
+
+    # # iterate from_folder folder
+    # for root, dirs, files in os.walk(from_folder, topdown=True):
+        # root = Path(root)
+
+        # # iterate folders
+        # for name in dirs:
+            # # prepare handler args
+            # src_folder = root / name
+            # dst_folder = relative_to_folder(src_folder, from_folder, to_folder)
+            # # call handler
+            # fct_proc_folder(name, src_folder, dst_folder)
+
+        # # iterate files
+        # for name in files:
+            # # prepare handler args
+            # src_file = root / name
+            # dst_file = relative_to_folder(src_file, from_folder, to_folder)
+            # # call handler
+            # fct_proc_file(name, src_file, dst_file)
 
-        # iterate files
-        for name in files:
-            # prepare handler args
-            src_file = root / name
-            dst_file = relative_to_folder(src_file, from_folder, to_folder)
-            # call handler
-            fct_proc_file(name, src_file, dst_file)
 
+def setup_logging() -> None:
+    """
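Taken together, the new helpers above resolve asset locations from the repository root. A small sketch of the expected results (the checkout path is illustrative):

```python
import common
from common import AssetKind

# Assuming the repository is checked out at /home/user/BBP_NG (illustrative path):
#   common.get_root_folder()                          -> /home/user/BBP_NG
#   common.get_raw_assets_folder(AssetKind.Meshes)    -> /home/user/BBP_NG/assets/meshes
#   common.get_plugin_assets_folder(AssetKind.Meshes) -> /home/user/BBP_NG/bbp_ng/meshes
print(common.get_raw_assets_folder(AssetKind.Jsons))
print(common.get_plugin_assets_folder(AssetKind.Jsons))
```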
scripts/uv.lock (generated, 2 changed lines)
@@ -131,7 +131,7 @@ wheels = [
 ]
 
 [[package]]
-name = "tools"
+name = "scripts"
 version = "1.0.0"
 source = { virtual = "." }
 dependencies = [
@@ -156,7 +156,7 @@ class Prototypes(RootModel):
 
 
 def validate_json() -> None:
-    raw_json_folder = common.get_plugin_folder() / 'raw_jsons'
+    raw_json_folder = common.get_root_folder() / 'raw_jsons'
 
     for json_file in raw_json_folder.rglob('*.json'):
        logging.info(f'Validating {json_file} ...')
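A detail to keep in mind when comparing the scripts: `validate_json` uses `rglob`, which descends into subfolders, while the builders above use `glob`, which only matches top-level files. A tiny illustration (the folder layout is assumed for the example):

```python
from pathlib import Path

folder = Path('raw_jsons')  # assume it holds a.json plus sub/b.json (illustrative layout)

print([p.name for p in folder.glob('*.json')])   # ['a.json']            (top level only)
print([p.name for p in folder.rglob('*.json')])  # ['a.json', 'b.json']  (recursive)
```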