fix: improve BME extractor
- Improve the BME extractor with newly added classes.
- I have written about half of the BME validator, but I am setting it aside for now.
  * It takes too much time and I don't want to spend more on it; it is postponed to the next update.
  * All of the BME validator code is commented out for future implementation.
  * Upgrading the BME JSON files to YAML format is also postponed.
- Change some interfaces in common.py and synchronize the modules that use them.
parent 04aa879c22
commit f10c273067

bbp_ng/tools/README.md (new file, 8 lines)
@@ -0,0 +1,8 @@
# Tools

The tool scripts in this directory require the following dependencies (the listed versions are not mandatory; they are simply versions that are known to work. You can upgrade or downgrade them as you wish, as long as the scripts still run):

```
pillow=10.2.0
termcolor=2.2.0
```
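For reference, a quick way to confirm that these dependencies are present is to query the installed package versions. This is a minimal sketch only; it assumes the PyPI package names `pillow` and `termcolor` implied by the list above, and the exact versions are known-good rather than hard requirements:

```python
# Minimal sketch: check that the tool dependencies are installed and print their versions.
from importlib.metadata import version, PackageNotFoundError

for package in ('pillow', 'termcolor'):
    try:
        print(f'{package}: {version(package)}')
    except PackageNotFoundError:
        print(f'{package}: not installed')
```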
bbp_ng/tools/bme_relatives.py
@@ -1,6 +1,5 @@
 import typing
-import collections
-import simple_po
+import simple_po, bme_utils


 #region Translation Constant
@@ -60,106 +59,285 @@ TOKEN_INSTANCES_TRANSFORM: str = 'transform'
 #endregion

-class Reporter():
-    """
-    General reporter commonly used by BME validator.
-    """
-
-    def __init__(self):
-        pass
-
-    def __report(self, type: str, msg: str, context: str | None) -> None:
-        strl: str = f'[{type}]'
-        if context is not None:
-            strl += f'[{context}]'
-        strl += ' ' + msg
-        print(strl)
-
-    def error(self, msg: str, context: str | None = None) -> None:
-        """
-        @brief Report an error.
-        @param[in] msg The message to show.
-        @param[in] context The context of this message, e.g. the file path. None if no context.
-        """
-        self.__report('Error', msg, context)
-
-    def warning(self, msg: str, context: str | None = None) -> None:
-        """
-        @brief Report a warning.
-        @param[in] msg The message to show.
-        @param[in] context The context of this message, e.g. the file path. None if no context.
-        """
-        self.__report('Warning', msg, context)
-
-    def info(self, msg: str, context: str | None = None) -> None:
-        """
-        @brief Report a info.
-        @param[in] msg The message to show.
-        @param[in] context The context of this message, e.g. the file path. None if no context.
-        """
-        self.__report('Info', msg, context)
-
-class Hierarchy():
-    """
-    The hierarchy builder for BME validator to build context string representing the location where error happen.
-    And it can be utilized by BME extractor to generate the context of translation.
-    """
-
-    __mStack: collections.deque[str]
-
-    def __init__(self):
-        self.__mStack = collections.deque()
-
-    def push(self, item: str) -> None:
-        """
-        @brief Add an item into this hierarchy.
-        @param[in] item New added item.
-        """
-        self.__mStack.append(item)
-
-    def push_index(self, index: int) -> None:
-        """
-        @brief Add an integral index into this hierarchy.
-        @details
-        The difference between this and normal push function is that added item is integral index.
-        This function will automatically convert it to string with a special format first, then push it into hierarchy.
-        @param[in] item New added index.
-        """
-        self.__mStack.append(f'[{index}]')
-
-    def pop(self) -> None:
-        """
-        @brief Remove the top item from hierarchy
-        """
-        self.__mStack.pop()
-
-    def build_hierarchy_string(self) -> str:
-        """
-        Build the string which can represent this hierarchy.
-        @return The built string representing this hierarchy.
-        """
-        return '/'.join(self.__mStack)
-
-class BMEValidator():
-    """
-    The validator for BME prototype declarartions.
-    This validator will validate given prototype declaration JSON structure,
-    to check then whether have all essential fields BME standard required and whether have any unknown fields.
-    """
-
-    __mPrototypeSet: set[str]
-    __mHierarchy: Hierarchy
-    __mReporter: Reporter
-
-    def __init__(self, reporter: Reporter):
-        self.__mPrototypeSet = set()
-        self.__mHierarchy = Hierarchy()
-        self.__mReporter = reporter
-
-    def validate(self, assoc_file: str, prototypes: typing.Any) -> None:
-        self.__mHierarchy.push(assoc_file)
-
-        self.__mHierarchy.pop()
+# TODO: finish BME validator
+
+# class ReporterWithHierarchy():
+#     """
+#     BME validator and extractor specific reporter
+#     which automatically uses the hierarchy as its context when outputting.
+#     """
+#
+#     __mReporter: bme_utils.Reporter
+#     __mHierarchy: bme_utils.Hierarchy
+#
+#     def __init__(self, reporter: bme_utils.Reporter, hierarchy: bme_utils.Hierarchy):
+#         self.__mReporter = reporter
+#         self.__mHierarchy = hierarchy
+#
+#     def error(self, msg: str) -> None:
+#         self.__mReporter.error(msg, self.__mHierarchy.build_hierarchy_string())
+#     def warning(self, msg: str) -> None:
+#         self.__mReporter.warning(msg, self.__mHierarchy.build_hierarchy_string())
+#     def info(self, msg: str) -> None:
+#         self.__mReporter.info(msg, self.__mHierarchy.build_hierarchy_string())
+
+# class UniqueField():
+#     """
+#     Some BME prototype fields should be unique in global scope.
+#     So the BME validator should check this. That's the feature this class provides.
+#     This class is an abstract class and should not be used directly.
+#     Use a child class please.
+#     """
+#
+#     __mUniques: set[str]
+#     __mReporter: ReporterWithHierarchy
+#
+#     def __init__(self, reporter: ReporterWithHierarchy):
+#         self.__mUniques = set()
+#         self.__mReporter = reporter
+#
+#     def register(self, entry: str) -> bool:
+#         """
+#         @brief Try to register the given entry in the unique set.
+#         @details
+#         If the given entry is not present in the unique set, it will be inserted and True is returned.
+#         If the given entry is already in the unique set, this function will use the reporter to output an error message and return False.
+#         @param[in] entry The entry to be checked and inserted.
+#         @return True if entry is unique, otherwise False.
+#         """
+#         if entry in self.__mUniques:
+#             self.__mReporter.error(self._get_error_msg(entry))
+#             return False
+#         else:
+#             self.__mUniques.add(entry)
+#             return True
+#
+#     def clear(self) -> None:
+#         """
+#         @brief Clear this unique set for further use.
+#         """
+#         self.__mUniques.clear()
+#
+#     def _get_error_msg(self, err_entry: str) -> str:
+#         """
+#         @brief Get the error message when an error occurs.
+#         @details
+#         This is an internal function to get the error message which will be passed to the reporter.
+#         The message is generated from the given entry which caused the non-unique issue.
+#         Outer callers should not call this function and every child class should override it.
+#         @param[in] err_entry The entry causing the error.
+#         @return The error message generated from the given error entry.
+#         """
+#         raise NotImplementedError()
+
+# class UniqueIdentifier(UniqueField):
+#     """Specific UniqueField for unique prototype identifier."""
+#     def _get_error_msg(self, err_entry: str) -> str:
+#         return f'Trying to register multiple prototype with same name: "{err_entry}".'
+
+# class UniqueVariable(UniqueField):
+#     """Specific UniqueField for unique variable names within prototype."""
+#     def _get_error_msg(self, err_entry: str) -> str:
+#         return f'Trying to define multiple variable with same name: "{err_entry}" in the same prototype.'
+
+# class BMEValidator():
+#     """
+#     The validator for BME prototype declarations.
+#     This validator will validate the given prototype declaration JSON structure,
+#     to check whether it has all essential fields the BME standard requires and whether it has any unknown fields.
+#     """
+#
+#     __mHierarchy: bme_utils.Hierarchy
+#     __mReporter: ReporterWithHierarchy
+#     __mUniqueIdentifier: UniqueIdentifier
+#     __mUniqueVariable: UniqueVariable
+#
+#     def __init__(self, reporter: bme_utils.Reporter):
+#         self.__mHierarchy = bme_utils.Hierarchy()
+#         self.__mReporter = ReporterWithHierarchy(reporter, self.__mHierarchy)
+#         self.__mUniqueIdentifier = UniqueIdentifier(self.__mReporter)
+#         self.__mUniqueVariable = UniqueVariable(self.__mReporter)
+#
+#     _TCheckKey = typing.TypeVar('_TCheckKey')
+#     def __check_key(self, data: dict[str, typing.Any], key: str, expected_type: type[_TCheckKey]) -> _TCheckKey | None:
+#         """
+#         @brief Check the existence and type of the value stored in the given dict under the given key.
+#         @param[in] data The dict to be checked.
+#         @param[in] key The key for fetching the value.
+#         @param[in] expected_type The expected type of the fetched value.
+#         @return None if an error occurs, otherwise the value stored in the given dict under the given key.
+#         """
+#         gotten_value = data[key]
+#         if gotten_value is None:
+#             # report no key error
+#             self.__mReporter.error(f'Can not find key "{key}". Did you forget it?')
+#         elif not isinstance(gotten_value, expected_type):
+#             # get the type of value
+#             value_type = type(gotten_value)
+#             # format normal error message
+#             err_msg: str = f'The type of value stored inside key "{key}" is incorrect. '
+#             err_msg += f'Expect "{expected_type.__name__}" got "{value_type.__name__}". '
+#             # add special note for easily confused types,
+#             # e.g. forgetting to quote a number (a number literal is recognised as a number accidentally)
+#             if issubclass(expected_type, str) and issubclass(type(data), (int, float)):
+#                 err_msg += 'Did you forget to quote the number?'
+#             # report type error
+#             self.__mReporter.error(err_msg)
+#         else:
+#             # no error, return value
+#             return gotten_value
+#         # error occurs, return null
+#         return None
+#
+#     def __check_self(self, data: typing.Any, expected_type: type) -> bool:
+#         """
+#         @brief Check the type of the given data.
+#         @return True if the type matched, otherwise False.
+#         """
+#         if data is None:
+#             self.__mReporter.error('Data is unexpected null.')
+#         elif not isinstance(data, expected_type):
+#             # usually this function checks a list or dict, so there is no scenario where the user forgot to quote a literal number.
+#             self.__mReporter.error(f'The type of given data is not expected. Expect "{expected_type.__name__}" got "{type(data).__name__}".')
+#         else:
+#             # no error, return okay
+#             return True
+#         # error occurs, return failed
+#         return False
+#
+#     # Recursively call the checks layer by layer.
+#     # Each layer is only responsible for the checks of its own layer.
+#     # If the value is a list or dict, check its type at the current layer (the container itself, not each item), then call the corresponding layer's check on each item.
+#     # If the value is not one of these types (e.g. integer, float, string), check it at the current layer.
+#
+#     def validate(self, assoc_file: str, prototypes: typing.Any) -> None:
+#         # reset hierarchy
+#         self.__mHierarchy.clear()
+#         # start to validate
+#         with self.__mHierarchy.safe_push(assoc_file):
+#             self.__validate_prototypes(prototypes)
+#
+#     def __validate_prototypes(self, prototypes: typing.Any) -> None:
+#         # the outermost structure must be a list
+#         if not self.__check_self(prototypes, list): return
+#         cast_prototypes = typing.cast(list[typing.Any], prototypes)
+#         # iterate prototypes
+#         for prototype_index, prototype in enumerate(cast_prototypes):
+#             with self.__mHierarchy.safe_push(prototype_index) as layer:
+#                 self.__validate_prototype(layer, prototype)
+#
+#     def __validate_prototype(self, layer: bme_utils.HierarchyLayer, prototype: typing.Any) -> None:
+#         # check whether self is a dict
+#         if not self.__check_self(prototype, dict): return
+#         cast_prototype = typing.cast(dict[str, typing.Any], prototype)
+#
+#         # clear unique field for each prototype
+#         self.__mUniqueVariable.clear()
+#
+#         # check identifier
+#         identifier = self.__check_key(cast_prototype, TOKEN_IDENTIFIER, str)
+#         if identifier is not None:
+#             # replace hierarchy
+#             layer.emplace(identifier)
+#             # check unique
+#             self.__mUniqueIdentifier.register(identifier)
+#
+#         # check showcase but don't use the check function
+#         # because it is optional.
+#         showcase = cast_prototype[TOKEN_SHOWCASE]
+#         if showcase is not None:
+#             # we only check non-template prototypes
+#             with self.__mHierarchy.safe_push(TOKEN_SHOWCASE):
+#                 self.__validate_showcase(typing.cast(dict[str, typing.Any], showcase))
+#
+#         # check params, vars, vertices, faces, instances
+#         # they are all lists
+#         params = self.__check_key(cast_prototype, TOKEN_PARAMS, list)
+#         if params is not None:
+#             cast_params = typing.cast(list[typing.Any], params)
+#             with self.__mHierarchy.safe_push(TOKEN_PARAMS):
+#                 for param_index, param in enumerate(cast_params):
+#                     with self.__mHierarchy.safe_push(param_index):
+#                         self.__validate_param(param)
+#
+#         vars = self.__check_key(cast_prototype, TOKEN_VARS, list)
+#         if vars is not None:
+#             cast_vars = typing.cast(list[typing.Any], vars)
+#             with self.__mHierarchy.safe_push(TOKEN_VARS):
+#                 for var_index, var in enumerate(cast_vars):
+#                     with self.__mHierarchy.safe_push(var_index):
+#                         self.__validate_var(var)
+#
+#         vertices = self.__check_key(cast_prototype, TOKEN_VERTICES, list)
+#         if vertices is not None:
+#             cast_vertices = typing.cast(list[typing.Any], vertices)
+#             with self.__mHierarchy.safe_push(TOKEN_VERTICES):
+#                 for vertex_index, vertex in enumerate(cast_vertices):
+#                     with self.__mHierarchy.safe_push(vertex_index):
+#                         self.__validate_vertex(vertex)
+#
+#         faces = self.__check_key(cast_prototype, TOKEN_FACES, list)
+#         if faces is not None:
+#             cast_faces = typing.cast(list[typing.Any], faces)
+#             with self.__mHierarchy.safe_push(TOKEN_FACES):
+#                 for face_index, face in enumerate(cast_faces):
+#                     with self.__mHierarchy.safe_push(face_index):
+#                         self.__validate_face(face)
+#
+#         instances = self.__check_key(cast_prototype, TOKEN_INSTANCES, list)
+#         if instances is not None:
+#             cast_instances = typing.cast(list[typing.Any], instances)
+#             with self.__mHierarchy.safe_push(TOKEN_INSTANCES):
+#                 for instance_index, instance in enumerate(cast_instances):
+#                     with self.__mHierarchy.safe_push(instance_index):
+#                         self.__validate_instance(instance)
+#
+#     def __validate_showcase(self, showcase: dict[str, typing.Any]) -> None:
+#         pass
+#
+#     def __validate_param(self, param: typing.Any) -> None:
+#         # check whether self is a dict
+#         if not self.__check_self(param, dict): return
+#         cast_param = typing.cast(dict[str, typing.Any], param)
+#
+#         # check field
+#         field = self.__check_key(cast_param, TOKEN_PARAMS_FIELD, str)
+#         if field is not None:
+#             self.__mUniqueVariable.register(field)
+#
+#         # check data
+#         self.__check_key(cast_param, TOKEN_PARAMS_DATA, str)
+#
+#     def __validate_var(self, var: typing.Any) -> None:
+#         # check whether self is a dict
+#         if not self.__check_self(var, dict): return
+#         cast_var = typing.cast(dict[str, typing.Any], var)
+#
+#         # check field
+#         field = self.__check_key(cast_var, TOKEN_VARS_FIELD, str)
+#         if field is not None:
+#             self.__mUniqueVariable.register(field)
+#
+#         # check data
+#         self.__check_key(cast_var, TOKEN_VARS_DATA, str)
+#
+#     def __validate_vertex(self, vertex: typing.Any) -> None:
+#         # check whether self is a dict
+#         if not self.__check_self(vertex, dict): return
+#         cast_vertex = typing.cast(dict[str, typing.Any], vertex)
+#
+#         # check fields
+#         self.__check_key(cast_vertex, TOKEN_VERTICES_SKIP, str)
+#         self.__check_key(cast_vertex, TOKEN_VERTICES_DATA, str)
+#
+#     def __validate_face(self, face: typing.Any) -> None:
+#         pass
+#
+#     def __validate_instance(self, instance: typing.Any) -> None:
+#         pass

 class BMEExtractor():
     """
@@ -175,38 +353,57 @@ class BMEExtractor():
     """

     __mAssocFile: str
-    __mHierarchy: Hierarchy
+    __mHierarchy: bme_utils.Hierarchy
+    __mReporter: bme_utils.Reporter
     __mPoWriter: simple_po.PoWriter

-    def __init__(self, po_writer: simple_po.PoWriter):
+    def __init__(self, reporter: bme_utils.Reporter, po_writer: simple_po.PoWriter):
         self.__mAssocFile = ''
-        self.__mHierarchy = Hierarchy()
+        self.__mHierarchy = bme_utils.Hierarchy()
+        self.__mReporter = reporter
         self.__mPoWriter = po_writer

+    def __add_translation(self, msg: str) -> None:
+        """
+        @brief Convenient internal translation adder.
+        @details Add the given message into the PO file with the auto-generated hierarchy as the translation context.
+        @param[in] msg The message to be translated.
+        """
+        self.__mPoWriter.add_entry(
+            msg,
+            CTX_TRANSLATION + '/' + self.__mHierarchy.build_hierarchy_string(),
+            # use the associated file as the extracted message to tell the user where we extracted it.
+            # putting the file name into the hierarchy is not proper (the file path may change when moving prototypes between files).
+            self.__mAssocFile
+        )
+
+    def __report_duplication_error(self) -> None:
+        """
+        @brief Convenient internal function to report a duplicated translation message issue.
+        @details
+        A convenient internal function to report the issue that
+        the "title" field and "desc" field of the same showcase configuration entry have the same content,
+        which may cause the generated PO file to be illegal.
+        """
+        self.__mReporter.error(
+            'The content of "title" and "desc" can not be the same in one entry. Please modify one of them.',
+            self.__mAssocFile + '/' + self.__mHierarchy.build_hierarchy_string()
+        )
+
     def extract(self, assoc_file: str, prototypes: list[dict[str, typing.Any]]) -> None:
         self.__mAssocFile = assoc_file
         for prototype in prototypes:
             self.__extract_prototype(prototype)

-    def __add_translation(self, strl: str) -> None:
-        self.__mPoWriter.add_entry(
-            strl,
-            CTX_TRANSLATION + '/' + self.__mHierarchy.build_hierarchy_string(),
-            self.__mAssocFile
-        )
-
     def __extract_prototype(self, prototype: dict[str, typing.Any]) -> None:
         # get identifier first
         identifier: str = prototype[TOKEN_IDENTIFIER]
-        self.__mHierarchy.push(identifier)
-
-        # get showcase node and only write PO file if it is not template prototype
-        showcase: dict[str, typing.Any] | None = prototype[TOKEN_SHOWCASE]
-        if showcase is not None:
-            self.__extract_showcase(showcase)
-
-        self.__mHierarchy.pop()
+        with self.__mHierarchy.safe_push(identifier):
+            # get showcase node and only write PO file if it is not template prototype
+            showcase: dict[str, typing.Any] | None = prototype[TOKEN_SHOWCASE]
+            if showcase is not None:
+                self.__extract_showcase(showcase)

     def __extract_showcase(self, showcase: dict[str, typing.Any]) -> None:
         # export self name first
         self.__add_translation(showcase[TOKEN_SHOWCASE_TITLE])
@@ -217,14 +414,20 @@ class BMEExtractor():
             self.__extract_showcase_cfg(cfg_index, cfg)

     def __extract_showcase_cfg(self, index: int, cfg: dict[str, typing.Any]) -> None:
-        self.__mHierarchy.push_index(index)
-
-        # extract field title and description
-        title: str = cfg[TOKEN_SHOWCASE_CFGS_TITLE]
-        desc: str = cfg[TOKEN_SHOWCASE_CFGS_DESC]
-
-        # and export them respectively
-        self.__add_translation(title)
-        self.__add_translation(desc)
-
-        self.__mHierarchy.pop()
+        # push cfg index
+        with self.__mHierarchy.safe_push(index):
+            # extract field title and description
+            title: str = cfg[TOKEN_SHOWCASE_CFGS_TITLE]
+            desc: str = cfg[TOKEN_SHOWCASE_CFGS_DESC]
+
+            # check duplication error:
+            # if "title" is equal to "desc" and they are not blank
+            if title == desc and title != "":
+                self.__report_duplication_error()
+
+            # export them respectively if they are not blank
+            if title != "":
+                self.__add_translation(title)
+            if desc != "":
+                self.__add_translation(desc)
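As a concrete illustration of the context string that `__add_translation` builds for each PO entry, consider the sketch below. The real value of `CTX_TRANSLATION` is defined earlier in bme_relatives.py and is only assumed here; the hierarchy content depends on the prototype being extracted.

```python
# Hypothetical values, for illustration only.
CTX_TRANSLATION = 'BME_prototype'      # assumed; the real constant lives in bme_relatives.py
hierarchy_string = 'floor_side/[0]'    # prototype identifier, then the showcase cfg index
print(CTX_TRANSLATION + '/' + hierarchy_string)
# -> BME_prototype/floor_side/[0]
```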
bbp_ng/tools/bme_utils.py (new file, 144 lines)
@@ -0,0 +1,144 @@
import typing
import collections
import termcolor

class Reporter():
    """
    General reporter with context support for convenient logging.
    """

    def __init__(self):
        pass

    def __report(self, type: str, msg: str, context: str | None, color: str) -> None:
        # build message
        strl: str = f'[{type}]'
        if context is not None:
            strl += f'[{context}]'
        strl += ' ' + msg
        # output with color
        termcolor.cprint(strl, color)

    def error(self, msg: str, context: str | None = None) -> None:
        """
        @brief Report an error.
        @param[in] msg The message to show.
        @param[in] context The context of this message, e.g. the file path. None if no context.
        """
        self.__report('Error', msg, context, 'red')

    def warning(self, msg: str, context: str | None = None) -> None:
        """
        @brief Report a warning.
        @param[in] msg The message to show.
        @param[in] context The context of this message, e.g. the file path. None if no context.
        """
        self.__report('Warning', msg, context, 'yellow')

    def info(self, msg: str, context: str | None = None) -> None:
        """
        @brief Report an info message.
        @param[in] msg The message to show.
        @param[in] context The context of this message, e.g. the file path. None if no context.
        """
        self.__report('Info', msg, context, 'white')

class Hierarchy():
    """
    The hierarchy for the BME validator and BME extractor.
    In the BME validator, it builds a human-readable string representing the location where an error happened.
    In the BME extractor, it builds the string used as the context of a translation.
    """

    __mStack: collections.deque[str]

    def __init__(self):
        self.__mStack = collections.deque()

    def push(self, item: str | int) -> None:
        """
        @brief Add an item onto the top of this hierarchy.
        @details
        If the given item is a string, it is pushed into the hierarchy directly.
        If the given item is an integer, this function treats it as a special case, an index,
        and pushes it into the hierarchy after formatting it (adding a pair of brackets around it).
        @param[in] item Newly added item.
        """
        if isinstance(item, str):
            self.__mStack.append(item)
        elif isinstance(item, int):
            self.__mStack.append(f'[{item}]')
        else:
            raise Exception('Unexpected type of item when pushing into hierarchy.')

    def pop(self) -> None:
        """
        @brief Remove the top item from the hierarchy.
        """
        self.__mStack.pop()

    def safe_push(self, item: str | int) -> 'HierarchyLayer':
        """
        @brief The safe version of the push function.
        @return A with-context-supported instance which makes sure the pushed item is popped when leaving the scope.
        """
        return HierarchyLayer(self, item)

    def clear(self) -> None:
        """
        @brief Clear this hierarchy.
        """
        self.__mStack.clear()

    def depth(self) -> int:
        """
        @brief Return the depth of this hierarchy.
        @return The depth of this hierarchy.
        """
        return len(self.__mStack)

    def build_hierarchy_string(self) -> str:
        """
        @brief Build the string which represents this hierarchy.
        @details It just joins every item with `/` as separator.
        @return The built string representing this hierarchy.
        """
        return '/'.join(self.__mStack)

class HierarchyLayer():
    """
    A with-context-supported class for Hierarchy which automatically pops its item when leaving the scope.
    This is convenient for keeping the hierarchy balanced (it avoids a programmer accidentally forgetting to pop an item).
    """

    __mHasPop: bool
    __mAssocHierarchy: Hierarchy

    def __init__(self, assoc_hierarchy: Hierarchy, item: str | int):
        self.__mAssocHierarchy = assoc_hierarchy
        self.__mHasPop = False
        self.__mAssocHierarchy.push(item)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.close()

    def close(self) -> None:
        if not self.__mHasPop:
            self.__mAssocHierarchy.pop()
            self.__mHasPop = True

    def emplace(self, new_item: str | int) -> None:
        """
        @brief Replace the content of the top item in place.
        @details
        In some cases the caller needs to replace the content of the top item.
        For example, at the beginning we only have index info;
        after validating something we can fetch more human-readable info, such as a name,
        and then we need to replace the content of the top item.
        @param[in] new_item The new content of the top item.
        """
        self.__mAssocHierarchy.pop()
        self.__mAssocHierarchy.push(new_item)
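To make the intended call pattern concrete, here is a minimal usage sketch of the new Reporter / Hierarchy / HierarchyLayer trio. Only the API shown in the new file above is assumed; the file name and item names are made up for illustration.

```python
# Minimal usage sketch for bme_utils (assumes bbp_ng/tools is on sys.path so bme_utils imports).
import bme_utils

reporter = bme_utils.Reporter()
hierarchy = bme_utils.Hierarchy()

with hierarchy.safe_push('floor.json'):           # string items are pushed as-is
    with hierarchy.safe_push(0) as layer:          # integer items are formatted as "[0]"
        layer.emplace('floor_side')                # swap the index for a readable name once known
        reporter.error('missing "skip" field', hierarchy.build_hierarchy_string())
        # prints (in red): [Error][floor.json/floor_side] missing "skip" field
# both items are popped automatically when the with-blocks exit
```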
@@ -1,33 +1,36 @@
 import os
+import bme_utils
 import common
 import PIL, PIL.Image

 # the config for thumbnail
 g_ThumbnailSize: int = 16

-def resize_image(src_file: str, dst_file: str) -> None:
-    # open image
-    src_image: PIL.Image.Image = PIL.Image.open(src_file)
-    # create thumbnail
-    src_image.thumbnail((g_ThumbnailSize, g_ThumbnailSize))
-    # save to new file
-    src_image.save(dst_file)
-
-def create_thumbnails() -> None:
-    # get folder path
-    root_folder: str = common.get_plugin_folder()
-
-    # prepare handler
-    def folder_handler(src_folder: str, dst_folder: str) -> None:
-        # just create folder
-        print(f'Creating Folder: {src_folder} -> {dst_folder}')
-        os.makedirs(dst_folder, exist_ok = True)
-    def file_handler(src_file: str, dst_file: str) -> None:
-        # skip non-image
-        if not src_file.endswith('.png'): return
-        # call thumbnail func
-        print(f'Processing Thumbnail: {src_file} -> {dst_file}')
-        resize_image(src_file, dst_file)
-
-    # call common processor
-    common.common_file_migrator(
+class ThumbnailCreator():
+
+    __mReporter: bme_utils.Reporter
+
+    def __init__(self):
+        self.__mReporter = bme_utils.Reporter()
+
+    def run(self) -> None:
+        self.__create_thumbnails()
+
+    def __create_thumbnails(self) -> None:
+        # get folder path
+        root_folder: str = common.get_plugin_folder()
+
+        # prepare handler
+        def folder_handler(rel_name: str, src_folder: str, dst_folder: str) -> None:
+            # just create folder
+            self.__mReporter.info(f'Creating Folder: {src_folder} -> {dst_folder}')
+            os.makedirs(dst_folder, exist_ok = True)
+        def file_handler(rel_name: str, src_file: str, dst_file: str) -> None:
+            # skip non-image
+            if not src_file.endswith('.png'): return
+            # call thumbnail func
+            self.__mReporter.info(f'Processing Thumbnail: {src_file} -> {dst_file}')
+            self.__resize_image(src_file, dst_file)
+
+        # call common processor
+        common.common_file_migrator(
@@ -37,7 +40,16 @@ def create_thumbnails() -> None:
-        file_handler
-    )
-
-    print('Done.')
+            file_handler
+        )
+
+        self.__mReporter.info('Building thumbnail done.')
+
+    def __resize_image(self, src_file: str, dst_file: str) -> None:
+        # open image
+        src_image: PIL.Image.Image = PIL.Image.open(src_file)
+        # create thumbnail
+        src_image.thumbnail((g_ThumbnailSize, g_ThumbnailSize))
+        # save to new file
+        src_image.save(dst_file)

 if __name__ == '__main__':
-    create_thumbnails()
+    thumbnail_creator = ThumbnailCreator()
+    thumbnail_creator.run()
@@ -1,48 +1,22 @@
 import os, json, typing
-import bme_relatives, simple_po
+import bme_utils, bme_relatives, simple_po
 import common

-def create_compressed_jsons() -> None:
-    # get folder path
-    root_folder: str = common.get_plugin_folder()
-
-    # prepare handler
-    def folder_handler(src_folder: str, dst_folder: str) -> None:
-        # just create folder
-        print(f'Creating Folder: {src_folder} -> {dst_folder}')
-        os.makedirs(dst_folder, exist_ok = True)
-    def file_handler(src_file: str, dst_file: str) -> None:
-        # skip non-json
-        if not src_file.endswith('.json'): return
-        # call compress func
-        print(f'Processing Json: {src_file} -> {dst_file}')
-        compress_json(src_file, dst_file)
-
-    # call common processor
-    common.common_file_migrator(
-        os.path.join(root_folder, 'raw_jsons'),
-        os.path.join(root_folder, 'jsons'),
-        folder_handler,
-        file_handler
-    )
-
-    print('Done.')
-
 class JsonCompressor():

-    __mReporter: bme_relatives.Reporter
+    __mReporter: bme_utils.Reporter
     __mPoWriter: simple_po.PoWriter
-    __mValidator: bme_relatives.BMEValidator
+    # __mValidator: bme_relatives.BMEValidator
     __mExtractor: bme_relatives.BMEExtractor

     def __init__(self):
-        self.__mReporter = bme_relatives.Reporter()
+        self.__mReporter = bme_utils.Reporter()
         self.__mPoWriter = simple_po.PoWriter(
             os.path.join(common.get_plugin_folder(), 'i18n', 'bme.pot'),
             'BME Prototypes'
         )
-        self.__mValidator = bme_relatives.BMEValidator(self.__mReporter)
+        # self.__mValidator = bme_relatives.BMEValidator(self.__mReporter)
-        self.__mExtractor = bme_relatives.BMEExtractor(self.__mPoWriter)
+        self.__mExtractor = bme_relatives.BMEExtractor(self.__mReporter, self.__mPoWriter)

     def __enter__(self):
         return self
@@ -61,16 +35,16 @@ class JsonCompressor():
         root_folder: str = common.get_plugin_folder()

         # prepare handler
-        def folder_handler(src_folder: str, dst_folder: str) -> None:
+        def folder_handler(rel_name: str, src_folder: str, dst_folder: str) -> None:
             # just create folder
             self.__mReporter.info(f'Creating Folder: {src_folder} -> {dst_folder}')
             os.makedirs(dst_folder, exist_ok = True)
-        def file_handler(src_file: str, dst_file: str) -> None:
+        def file_handler(rel_name: str, src_file: str, dst_file: str) -> None:
             # skip non-json
             if not src_file.endswith('.json'): return
             # call compress func
-            self.__mReporter.info(f'Processing Json: {src_file} -> {dst_file}')
-            self.__compress_json(src_file, dst_file)
+            self.__mReporter.info(f'Processing JSON: {src_file} -> {dst_file}')
+            self.__compress_json(rel_name, src_file, dst_file)

         # call common processor
         common.common_file_migrator(
@@ -80,19 +54,19 @@ class JsonCompressor():
             file_handler
         )

-        self.__mReporter.info('Done.')
+        self.__mReporter.info('Building JSON done.')

-    def __compress_json(self, src_file: str, dst_file: str) -> None:
+    def __compress_json(self, rel_name: str, src_file: str, dst_file: str) -> None:
         # load data first
         loaded_prototypes: typing.Any
         with open(src_file, 'r', encoding = 'utf-8') as fr:
             loaded_prototypes = json.load(fr)

         # validate loaded data
-        self.__mValidator.validate(os.path.basename(src_file), loaded_prototypes)
+        # self.__mValidator.validate(rel_name, loaded_prototypes)

         # extract translation
-        self.__mExtractor.extract(os.path.basename(src_file), loaded_prototypes)
+        self.__mExtractor.extract(rel_name, loaded_prototypes)

         # save result
         with open(dst_file, 'w', encoding = 'utf-8') as fw:
|
|||||||
|
|
||||||
def common_file_migrator(
|
def common_file_migrator(
|
||||||
from_folder: str, to_folder: str,
|
from_folder: str, to_folder: str,
|
||||||
fct_proc_folder: typing.Callable[[str, str], None],
|
fct_proc_folder: typing.Callable[[str, str, str], None],
|
||||||
fct_proc_file: typing.Callable[[str, str], None]) -> None:
|
fct_proc_file: typing.Callable[[str, str, str], None]) -> None:
|
||||||
"""
|
"""
|
||||||
Common file migrator used by some build script.
|
Common file migrator used by some build script.
|
||||||
|
|
||||||
@ -37,7 +37,9 @@ def common_file_migrator(
|
|||||||
`fct_proc_folder` is a function pointer from caller which handle folder migration in detail.
|
`fct_proc_folder` is a function pointer from caller which handle folder migration in detail.
|
||||||
`fct_proc_file` is same but handle file migration.
|
`fct_proc_file` is same but handle file migration.
|
||||||
|
|
||||||
`fct_proc_folder` will receive 2 args. First is the source folder. Second is expected dest folder.
|
`fct_proc_folder` will receive 2 args.
|
||||||
|
First is a relative path presenting the folder we are processing which is usually used for printing to user.
|
||||||
|
Second is the source folder. Third is expected dest folder.
|
||||||
`fct_proc_file` is same, but receive the file path instead.
|
`fct_proc_file` is same, but receive the file path instead.
|
||||||
Both of these function pointer should do the migration in detail. This function will only just iterate
|
Both of these function pointer should do the migration in detail. This function will only just iterate
|
||||||
folder and give essential args and will not do any migration operations such as copying or moving.
|
folder and give essential args and will not do any migration operations such as copying or moving.
|
||||||
@ -55,11 +57,11 @@ def common_file_migrator(
|
|||||||
src_folder: str = os.path.join(root, name)
|
src_folder: str = os.path.join(root, name)
|
||||||
dst_folder: str = relative_to_folder(src_folder, from_folder, to_folder)
|
dst_folder: str = relative_to_folder(src_folder, from_folder, to_folder)
|
||||||
# call handler
|
# call handler
|
||||||
fct_proc_folder(src_folder, dst_folder)
|
fct_proc_folder(name, src_folder, dst_folder)
|
||||||
# iterate files
|
# iterate files
|
||||||
for name in files:
|
for name in files:
|
||||||
# prepare handler args
|
# prepare handler args
|
||||||
src_file: str = os.path.join(root, name)
|
src_file: str = os.path.join(root, name)
|
||||||
dst_file: str = relative_to_folder(src_file, from_folder, to_folder)
|
dst_file: str = relative_to_folder(src_file, from_folder, to_folder)
|
||||||
# call handler
|
# call handler
|
||||||
fct_proc_file(src_file, dst_file)
|
fct_proc_file(name, src_file, dst_file)
|
||||||
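Callers updating to the new three-argument handler signature would look roughly like the sketch below. It is built only from the signature documented above; the folder names mirror the build script earlier in this commit and are illustrative.

```python
# Minimal sketch of the new handler signature expected by common.common_file_migrator
# (assumes it is run from bbp_ng/tools so that `import common` resolves).
import os
import common

def folder_handler(rel_name: str, src_folder: str, dst_folder: str) -> None:
    print(f'Creating Folder ({rel_name}): {src_folder} -> {dst_folder}')
    os.makedirs(dst_folder, exist_ok = True)

def file_handler(rel_name: str, src_file: str, dst_file: str) -> None:
    print(f'Processing ({rel_name}): {src_file} -> {dst_file}')

root = common.get_plugin_folder()
common.common_file_migrator(
    os.path.join(root, 'raw_jsons'),
    os.path.join(root, 'jsons'),
    folder_handler,
    file_handler
)
```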