diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml
index 5fb43c7..c84bc8c 100644
--- a/.github/FUNDING.yml
+++ b/.github/FUNDING.yml
@@ -1,3 +1,4 @@
custom:
+ - 'https://www.paypal.com/paypalme/Airkk426'
- 'https://afdian.com/a/Eric_Joker'
- 'https://github.com/Eric-Joker/Enigmata/blob/main/docs/receiving%20code.png'
diff --git a/README.md b/README.md
index b2fd461..6549117 100644
--- a/README.md
+++ b/README.md
@@ -82,7 +82,11 @@ Under certain conditions, you may be required to update the Vanilla Data. You ne
## Donation
-Because I am still a minor, the policies in my area make it difficult for me to get money here. 😭
+[PayPal](https://www.paypal.com/paypalme/Airkk426)
+
+[afdian](https://afdian.com/a/Eric_Joker)
+
+Thank you very much for your willingness to support the author. Your avatar will appear on my GitHub profile and in the Readme of this project.
## Thanks
diff --git a/config.yaml b/config.yaml
index d925248..3ac5000 100644
--- a/config.yaml
+++ b/config.yaml
@@ -21,7 +21,7 @@ packs:
# 为混淆时提供独特的命名空间,需与 `path` 一一对应
namespace: rhp
# 资源包目录下但不属于资源包的文件
- excluded_files:
+ exclude_files:
- '.github/**'
- '.git/**'
- '.git*'
@@ -51,7 +51,7 @@ obfuscator:
json_funs:
# 依据自然顺序排序 Json 键值
sort: true
- # 将 Json 键值中的字符串转为 Unicode,尽管是 Ascll 字符。遵循 `excluded_jsonui_names` 和 `excluded_entity_names`
+ # 将 Json 键值中的字符串转为 Unicode,尽管是 ASCII 字符。遵循 `exclude_jsonui_names` 和 `exclude_entity_names`
unicode: true
# 在 json 结尾处添加无意义空字典“{}”,不影响游戏解析但可能会影响 IDE 解析
empty_dict: true
@@ -128,7 +128,7 @@ obfuscator:
debug: false
# 不进行任何处理的 json
- excluded_jsons:
+ exclude_jsons:
- manifest.json
- "**/loading_messages.json"
- "**/blocks.json"
@@ -141,14 +141,18 @@ obfuscator:
- "**/languages.json"
- "**/splashes.json"
+ # 不进行混淆或添加水印的贴图文件名
+ exclude_image_names:
+ - item_cell_rhp
+
# 不进行混淆、添加注释、转义、去格式化并且将顺序提前的 UI 一级控件、变量、绑定
- excluded_jsonui_names:
+ exclude_jsonui_names:
- hotbar_grid_template
- setting_options_panel
- chest_label_panel
# 不在实体、模型、粒子、材质、动画、动画控制器、渲染控制器中进行混淆、转义、去格式化的键名和的 ID 及 Molang 变量名
- excluded_entity_names:
+ exclude_entity_names:
- number_0
- number_1
- number_2
@@ -181,4 +185,3 @@ obfuscator:
- health_display_hide
- health_display_baby
- health_display_controller
- - item_cell_rhp
diff --git a/config/base.py b/config/base.py
index 4fd1609..d31d491 100644
--- a/config/base.py
+++ b/config/base.py
@@ -40,7 +40,7 @@ class EnigmataConfig:
HEADER_VERSION = []
MODULES_UUID = []
MODULES_VERSION = []
- EXCLUDED_FILES = (".git*/**", ".mc*", "LICENSE", "README.md")
+ EXCLUDE_FILES = (".git*/**", ".mc*", "LICENSE", "README.md")
NOMEDIA = True
ZIP_NAME = []
NAMESPACE = []
@@ -67,7 +67,7 @@ class EnigmataConfig:
PACK_COMPRESS = 9
MTIME = (1989, 8, 10, 11, 45, 14)
DEBUG = False
- EXCLUDED_JSONS = (
+ EXCLUDE_JSONS = (
"manifest.json",
"**/loading_messages.json",
"**/blocks.json",
@@ -80,8 +80,9 @@ class EnigmataConfig:
"**/languages.json",
"**/splashes.json",
)
- EXCLUDED_JSONUI_NAMES = set()
- EXCLUDED_ENTITY_NAMES = set()
+ EXCLUDE_IMAGE_NAMES = set()
+ EXCLUDE_JSONUI_NAMES = set()
+ EXCLUDE_ENTITY_NAMES = set()
def __init__(self):
self._add_arguments()
@@ -129,7 +130,7 @@ def _add_arguments(self):
argsGroup1.add_argument("--modules-version", nargs="*", type=str, help="For manifest.json.")
argsGroup2 = parser.add_argument_group("Script Parameters")
argsGroup2.add_argument(
- "--excluded-files",
+ "--exclude-files",
nargs="*",
type=str,
help="Files in the resource pack directory but not part of the resource pack.",
@@ -159,7 +160,7 @@ def _add_arguments(self):
"--obfuscate-strs", "-s", nargs="*", type=str, help="Character pool for generating obfuscated strings."
)
argsGroup3.add_argument(
- "--obfuscate-ascll", "-a", nargs="*", type=str, help="Character pool for generating obfuscated strings."
+ "--obfuscate-ascll", "-a", nargs="*", type=str, help="Entity key-value obfuscation charset."
)
argsGroup3.add_argument("--sort", type=str2bool, help="Sort JSON keys in natural order.")
argsGroup3.add_argument(
@@ -204,7 +205,7 @@ def _add_arguments(self):
"--watermark-paths",
nargs="*",
type=str,
- help="Adds a string randomly split by namespace to all mapping filenames.",
+ help="Watermark targets.",
)
argsGroup3.add_argument(
"--wm-references",
@@ -242,22 +243,28 @@ def _add_arguments(self):
help="Modify the mtime of each file during packaging.",
)
argsGroup3.add_argument(
- "--excluded-jsons",
+ "--exclude-jsons",
nargs="*",
type=str,
help="Json without any processing.",
)
argsGroup3.add_argument(
- "--excluded-jsonui-names",
+ "--exclude-image-names",
nargs="*",
type=str,
- help="JsonUI first-level control names, variable names, hard-bound names without obfuscation, annotation, escaping, de-formatting, and order advancement.",
+ help="Texture filenames that are not obfuscated or watermarked.",
)
argsGroup3.add_argument(
- "--excluded-entity-names",
+ "--exclude-jsonui-names",
nargs="*",
type=str,
- help="Key names and names of entity series ID, and molang variable names in them that are not obfuscated, escaped, or de-formatted in files.",
+ help="JsonUI first-level control names, variable names, binding names without obfuscation, annotation, escaping, de-formatting, and order advancement.",
+ )
+ argsGroup3.add_argument(
+ "--exclude-entity-names",
+ nargs="*",
+ type=str,
+ help="Key names and names of entity series ID, and molang variable names without obfuscation, escaping, or de-formatting.",
)
self.args = parser.parse_args()
@@ -327,19 +334,22 @@ def reload(self, file: str):
# typecasting and supplementary attrs
self.mtime = tuple(self.mtime) if self.mtime else self.MTIME
- self.excluded_files = tuple(self.excluded_files) if self.excluded_files else self.EXCLUDED_FILES
+ self.exclude_files = tuple(self.exclude_files) if self.exclude_files else self.EXCLUDE_FILES
self.obfuscate_strs = tuple(self.obfuscate_strs) if self.obfuscate_strs else self.OBFUSCATE_STRS
self.obfuscate_ascll = tuple(self.obfuscate_ascll) if self.obfuscate_ascll else self.OBFUSCATE_ASCLL
self.watermark_paths = tuple(self.watermark_paths)
self.wm_references = tuple(self.wm_references)
self.obfuscate_paths = tuple(self.obfuscate_paths)
self.obf_references = tuple(self.obf_references)
- self.excluded_jsons = tuple(self.excluded_jsons) if self.excluded_jsons else self.EXCLUDED_JSONS
- self.excluded_jsonui_names = (
- set(self.excluded_jsonui_names) if self.excluded_jsonui_names else self.EXCLUDED_JSONUI_NAMES
+ self.exclude_jsons = tuple(self.exclude_jsons) if self.exclude_jsons else self.EXCLUDE_JSONS
+ self.exclude_image_names = (
+ set(self.exclude_image_names) if self.exclude_image_names else self.EXCLUDE_IMAGE_NAMES
+ )
+ self.exclude_jsonui_names = (
+ set(self.exclude_jsonui_names) if self.exclude_jsonui_names else self.EXCLUDE_JSONUI_NAMES
)
- self.excluded_entity_names = (
- set(self.excluded_entity_names) if self.excluded_entity_names else self.EXCLUDED_ENTITY_NAMES
+ self.exclude_entity_names = (
+ set(self.exclude_entity_names) if self.exclude_entity_names else self.EXCLUDE_ENTITY_NAMES
)
self.additional_jsonui = tuple(self.additional_jsonui) if self.additional_jsonui else self.ADDITIONAL_JSONUI
self.path = self.path if isinstance(self.path, list) else (self.path,)
@@ -357,9 +367,9 @@ def reload(self, file: str):
self.unicode = False
self.sort = False
self.is_vanilla_data_needed = (self.obfuscate_jsonui or self.obfuscate_entity) and not self.extract
- self.excluded_jsonui_names.add("namespace")
- self.excluded_entity_names.update(("player.base", "format_version", "version"))
- self.excluded_names = self.excluded_jsonui_names | self.excluded_entity_names
+ self.exclude_jsonui_names.add("namespace")
+ self.exclude_entity_names.update(("player.base", "format_version", "version"))
+ self.exclude_names = self.exclude_jsonui_names | self.exclude_entity_names
uuid_pattern = re.compile("^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$")
if self.pack_name and len(self.path) != len(self.pack_name):
raise ValueError("The pack name needs to correspond to the resource package.")
diff --git a/config_example.yaml b/config_example.yaml
index 7c15c56..3a19b15 100644
--- a/config_example.yaml
+++ b/config_example.yaml
@@ -12,26 +12,27 @@ log:
path: '.\logs'
packs:
- # for resource packs that need to be obfuscated
+ # Resource packs that require obfuscation
# If you need to use the PNG compression feature, please ensure the path does not contain non-ASCII characters.
path:
- ''
# The final packaged file name must correspond to the path one-to-one.
- # If it is an empty string, it will not be automatically packaged.
+ # Empty string disables auto-packaging.
zip_name:
- ''
# Provide a unique namespace for obfuscation, which must correspond to the path one-to-one.
namespace:
- rhp
# Files under the RP directory that do not belong to the RP.
- excluded_files:
+ exclude_files:
- '.github/**'
- '.git/**'
- '.git*'
- '.mc*'
- 'LICENSE'
- 'README.md'
- # Modify `manifest.json`, the values here must correspond one-to-one with `path`. If the value is False or an empty string, it will not be modified.
+ # For `manifest.json` bindings, values must match path 1:1.
+ # Setting as False/empty skips modification.
manifest:
name:
- ''
@@ -55,7 +56,7 @@ obfuscator:
- '0Oo°Οο⁰₀○。〇︒0Oo'
#- '0OoΟο○0Oo'
- # Characters used to generate obfuscated strings for entity series keys and values.
+ # Entity series keys/values obfuscation character set
# Must be all ASCII characters.
obfuscate_ascll:
- 'abcdefghijklmn'
@@ -63,7 +64,7 @@ obfuscator:
json_funs:
# Sort JSON keys in natural order.
sort: true
- # Convert both the keys and values in JSON to Unicode, even if they are ASCII characters. Follow `excluded_jsonui_names` and `excluded_entity_names`.
+ # Convert both the keys and values in JSON to Unicode, even if they are ASCII characters. Follow `exclude_jsonui_names` and `exclude_entity_names`.
unicode: true
# Add a meaningless empty dict `{}` at the end of the JSON. This will not affect game parsing but may affect IDE parsing.
empty_dict: true
@@ -74,7 +75,7 @@ obfuscator:
# Merge the JSON files under /ui/`namespace` into a single file.
merge_jsonui:
enable: true
- # The path of the merged file.
+ # Merged file path
path: 'font\.test.png'
# Add misleading key-value pairs to `_ui_defs.json`.
# It must be a JSON dict string or an empty string.
@@ -90,18 +91,18 @@ obfuscator:
# -
file_funs:
- # Add a randomly segmented string from the `namespace` to the filenames of all texture files in `paths` not present in vanilla.
+ # Insert randomized `namespace` fragments into texture filenames of non-vanilla `paths`
filename_watermark:
enable: true
paths:
- '**/textures/blocks/**'
- '**/textures/items/**'
- # JSON references the files to be watermarked
+ # Watermark target files (JSON references)
references:
- '**/textures/item_texture.json'
- '**/textures/terrain_texture.json'
- '**/textures/flipbook_textures.json'
- # Replace the filenames of all texture files in `paths` not present in vanilla with randomly generated strings from `obfuscate_str`.
+ # Obfuscated texture replacement using `obfuscate_str`
# If `filename_watermark` is true, automatically exclude `filename_watermark.paths`.
filename_obfuscation:
enable: true
@@ -109,7 +110,7 @@ obfuscator:
- '**/textures/ui/**'
- '**/textures/number/**'
- '**/textures/spawn_point/**'
- # JSON references the files to be obfuscated
+ # Obfuscation target files (JSON references)
references:
- '**/ui/**/*.json'
- '**/entity/*.json'
@@ -140,7 +141,7 @@ obfuscator:
debug: false
# JSON without any processing.
- excluded_jsons:
+ exclude_jsons:
- manifest.json
- "**/loading_messages.json"
- "**/blocks.json"
@@ -153,9 +154,12 @@ obfuscator:
- "**/languages.json"
- "**/splashes.json"
- # UI primary controls, variables, and bindings that are not obfuscated, commented, escaped, unformatted, and moved to the front.
- excluded_jsonui_names:
+ # Texture filenames (no obfuscation/watermark)
+ exclude_image_names:
- # Key names, IDs, and Molang variable names in entities, models, particles, materials, animations, animation controllers, and render controllers that are not obfuscated, escaped, or unformatted.
- excluded_entity_names:
+ # UI primary controls, variables, and bindings (no obfuscation/commenting/escaping/formatting/front-positioning)
+ exclude_jsonui_names:
+
+ # Key names, IDs, and Molang variables in entities, models, particles, materials, animations, animation controllers, and render controllers (no obfuscation/escaping/formatting)
+ exclude_entity_names:
diff --git a/docs/README.zh-CN.md b/docs/README.zh-CN.md
index 71be856..417d262 100644
--- a/docs/README.zh-CN.md
+++ b/docs/README.zh-CN.md
@@ -84,6 +84,12 @@ python main.py
+[PayPal](https://www.paypal.com/paypalme/Airkk426)
+
+[爱发电](https://afdian.com/a/Eric_Joker)
+
+非常感谢您的支持\~您的头像会出现在我的 Github 主页和本项目的 Readme 中\~
+
## 感谢
感谢此项目的所有贡献者!
diff --git a/main.py b/main.py
index 6ed680e..c232dab 100644
--- a/main.py
+++ b/main.py
@@ -159,11 +159,11 @@ def process_image(fh: FileHandler, vd: set, l: list):
fh.cut = os.path.splitext(cut)[0] if insub else fh.path
if glob.globmatch(fh.path, cfg.watermark_paths, flags=glob.D | glob.G | glob.N):
renames.append(fh)
- pbm.update_t_item(1)
+ pbm.update_t_item()
return
elif glob.globmatch(fh.path, cfg.obfuscate_paths, flags=glob.D | glob.G | glob.N):
obf_names.append(fh)
- pbm.update_t_item(1)
+ pbm.update_t_item()
return
if cfg.image_compress != -1 or cfg.extrainfo:
l.append(fh)
@@ -173,7 +173,7 @@ def process_image(fh: FileHandler, vd: set, l: list):
rel_path = os.path.relpath((path := os.path.join(root, file)), root_path)
if glob.globmatch(
rel_path,
- itertools.chain(("!manifest.json"), cfg.excluded_files) if cfg.mod_manifest else cfg.excluded_files,
+ itertools.chain(("!manifest.json",), cfg.exclude_files) if cfg.mod_manifest else cfg.exclude_files,
flags=glob.D | glob.G | glob.N,
):
continue
@@ -188,11 +188,11 @@ def process_image(fh: FileHandler, vd: set, l: list):
pbm.update_t_item(sum((cfg.image_compress > 6, cfg.extrainfo)))
elif rel_path.endswith(".lang"):
if cfg.obfuscate_jsonui:
- pbm.update_t_item(1)
+ pbm.update_t_item()
langs.append(fh)
elif rel_path == "manifest.json":
manifest = fh
- pbm.update_t_item(1)
+ pbm.update_t_item()
elif glob.globmatch(rel_path, ("materials/*.material", "subpacks/*/materials/*.material"), flags=glob.D):
splited = fh.path.split(os.sep)
fh.subpack_path = os.sep.join(splited[:2]) if "subpacks" in fh.path else ""
@@ -209,14 +209,14 @@ def process_image(fh: FileHandler, vd: set, l: list):
if glob.globmatch(rel_path, cfg.wm_references, flags=glob.D | glob.G | glob.N):
texture_jsons.append(fh)
- pbm.update_t_item(1)
+ pbm.update_t_item()
elif glob.globmatch(rel_path, cfg.obf_references, flags=glob.D | glob.G | glob.N):
texture_jsons_2.append(fh)
- pbm.update_t_item(1)
+ pbm.update_t_item()
if cfg.merged_ui_path and rel_path.endswith("_global_variables.json"):
ui_global_vars.append(fh)
elif cfg.merged_ui_path and rel_path.endswith("_ui_defs.json"):
- pbm.update_t_item(1)
+ pbm.update_t_item()
ui_defs.append(fh)
elif glob.globmatch(
rel_path,
@@ -234,7 +234,7 @@ def process_image(fh: FileHandler, vd: set, l: list):
jsonuis.append(fh)
elif glob.globmatch(rel_path, ("entity/**/*", "subpacks/*/entity/**/*"), flags=glob.D | glob.G):
if cfg.obfuscate_entity:
- pbm.update_t_item(1)
+ pbm.update_t_item()
entities.append(fh)
elif glob.globmatch(
rel_path,
@@ -256,11 +256,11 @@ def process_image(fh: FileHandler, vd: set, l: list):
rcs.append(fh)
elif glob.globmatch(rel_path, ("particles/**/*", "subpacks/*/particles/**/*"), flags=glob.D | glob.G):
if cfg.obfuscate_entity:
- pbm.update_t_item(1)
+ pbm.update_t_item()
particles.append(fh)
elif glob.globmatch(rel_path, ("materials/*", "subpacks/*/materials/*"), flags=glob.D):
if cfg.merge_entity:
- pbm.update_t_item(1)
+ pbm.update_t_item()
material_indexes.append(fh)
elif glob.globmatch(
rel_path,
@@ -271,15 +271,15 @@ def process_image(fh: FileHandler, vd: set, l: list):
else:
mkdirs(new_path := os.path.join(work_path, os.path.dirname(rel_path)))
shutil.copy2(path, new_path)
- pbm.update_n_file(1)
+ pbm.update_n_file()
# stats texture json
if cfg.watermark_paths or cfg.obfuscate_paths:
- for f in renames + obf_names:
- if os.path.exists(os.path.join(root_path, rel_path := f"{os.path.splitext(f.path)[0]}.json")):
+ for file in renames + obf_names:
+ if os.path.exists(os.path.join(root_path, rel_path := f"{os.path.splitext(file.path)[0]}.json")):
for j in std_jsons:
if rel_path == j:
image_jsons.append(j)
- pbm.update_t_item(1)
+ pbm.update_t_item()
if cfg.nomedia:
with default_write(os.path.join(work_path, ".nomedia")):
diff --git a/models/pbar_manager.py b/models/pbar_manager.py
index 77dbb85..739e662 100644
--- a/models/pbar_manager.py
+++ b/models/pbar_manager.py
@@ -41,23 +41,23 @@ def update_t_file(self):
# self.pbar.refresh()
@check_console
- def revert_t_item(self, increment):
+ def revert_t_item(self, increment=1):
self.pbar.total -= increment
# self.pbar.refresh()
@check_console
- def update_t_item(self, increment):
+ def update_t_item(self, increment=1):
self.pbar.total += increment
# self.pbar.refresh()
@check_console
- def update_n_file(self, increment):
+ def update_n_file(self, increment=1):
self.n_file += increment
self.pbar.unit = f"items {self.n_file}/{self.t_file}files"
# self.pbar.refresh()
@check_console
- def update(self, increment):
+ def update(self, increment=1):
self.pbar.unit = f"items {self.n_file}/{self.t_file}files"
self.pbar.update(increment)
diff --git a/obfuscators/entities.py b/obfuscators/entities.py
index c0f7fe6..8db7f50 100644
--- a/obfuscators/entities.py
+++ b/obfuscators/entities.py
@@ -104,24 +104,24 @@ def _merge_some_dict(self, data: dict, filetype: str, control_char: str, **merge
if controls := data.get(filetype):
version: str = data.get("format_version")
for k, v in controls.items():
- if k[len(control_char) :] not in cfg.excluded_entity_names:
+ if k[len(control_char) :] not in cfg.exclude_entity_names:
merged_dicts[f"v{version.replace(".", "")}"].setdefault(filetype, {})[k] = v
def _merge_model(self, data: dict, _: str, control_char: str, **merged_dicts):
if (version := data.get("format_version", "1.8.0")) == "1.8.0" or version == "1.10.0":
for k, v in data.items():
- if k != "format_version" and k[len(control_char) :] not in cfg.excluded_entity_names:
+ if k != "format_version" and k[len(control_char) :] not in cfg.exclude_entity_names:
merged_dicts[f"v{version.replace(".", "")}"][k] = v
else:
for i in data.get("minecraft:geometry"):
if (
i.get("description", {}).get("identifier", "").partition(":")[0][len(control_char) :][len(control_char) :]
- not in cfg.excluded_entity_names
+ not in cfg.exclude_entity_names
):
merged_dicts[f"v{version.replace(".", "")}"].setdefault("minecraft:geometry", []).append(i)
async def _async_merge_common(self, fun: Callable, filetype: str, control_char="", **merged_dicts):
- excluded_paths = set()
+ exclude_paths = set()
fh_list: list[FileHandler] = getattr(self, filetype)
# TODO: Since I don't need it, the functionality of obfuscating materials has not been tested.
@@ -143,16 +143,16 @@ async def _async_merge_common(self, fun: Callable, filetype: str, control_char="
or self.material_indexes
and filetype == "materials"
and j.cut not in can_merge
- or any(os.path.basename(j.cut).split("/")[-1].startswith(e) for e in cfg.excluded_entity_names)
+ or any(os.path.basename(j.cut).split("/")[-1].startswith(e) for e in cfg.exclude_entity_names)
or j.cut in getattr(vd, filetype)
):
self.processed[j.path] = await self.async_get_json_data(j)
- excluded_paths.add(j.cut)
+ exclude_paths.add(j.cut)
for j in fh_list.copy():
splited = os.path.basename(j.path).partition(".")
# obfuscate the filenames of files that cannot be merged
- if (is_exclude := j.cut in excluded_paths) or not cfg.merge_entity or j.subpack_path:
+ if (is_exclude := j.cut in exclude_paths) or not cfg.merge_entity or j.subpack_path:
if cfg.obfuscate_entity and not is_exclude:
j.path = os.path.join(os.path.dirname(j.path), gen_obfstr(splited[0], OBFStrType.OBFFILE) + splited[2])
# j.path = os.path.join(os.path.dirname(j.path), gen_obfstr(splited[0], OBFStrType.OBFFILE, 2) + splited[2])
@@ -160,7 +160,7 @@ async def _async_merge_common(self, fun: Callable, filetype: str, control_char="
j.processed = True
if cfg.merge_entity:
self.exclude_merge_files.add(j.path)
- pbm.revert_t_item(1)
+ pbm.revert_t_item()
# start merge
else:
fun(
@@ -174,8 +174,8 @@ async def _async_merge_common(self, fun: Callable, filetype: str, control_char="
fh_list.remove(j)
pbm.revert_t_item(sum((cfg.comment, cfg.empty_dict, cfg.sort, cfg.unicode, cfg.obfuscate_entity)))
- pbm.update_n_file(1)
- pbm.update(1)
+ pbm.update_n_file()
+ pbm.update()
for k, v in merged_dicts.items():
if len(v) > 1:
@@ -193,7 +193,7 @@ async def _async_merge_common(self, fun: Callable, filetype: str, control_char="
)
for j in self.material_indexes:
self.processed[j.path] = instance.traverse(await self.async_get_json_data(j))
- pbm.update(1)
+ pbm.update()
async def _async_obf_common(
self, filetype: str, mapping=None, eh=EntityHandler(), versions: tuple[str] = (), get_id: Callable = None
@@ -210,10 +210,10 @@ async def _async_obf_common(
await self.async_get_json_data(j), mapping, eh, self.dag, get_id
)
- pbm.update(1)
+ pbm.update()
j.processed = True
else:
- pbm.revert_t_item(1)
+ pbm.revert_t_item()
async def async_obf_ac(self):
merged_1_10 = {"format_version": "1.10.0"}
@@ -459,7 +459,7 @@ def get_truly_id(self, identifier: str):
def is_exclude(self, data: str, vd: set | dict | tuple | Callable, identifier: str = None):
return (
- (splited := self.get_truly_id(data))[0] in self.cfg.excluded_entity_names
+ (splited := self.get_truly_id(data))[0] in self.cfg.exclude_entity_names
or splited[0] in (vd if isinstance(vd, (set, dict, tuple)) else vd(self.handler.dag_type, identifier)),
splited[0],
splited[1],
@@ -526,7 +526,7 @@ def repl(match: re.Match):
(char := match.group(1))[0] == "q"
or char != "array"
and match.group(2) in self.vd.get_molang_vars(self.handler.dag_type, identifier)
- or match.group(2) in self.cfg.excluded_entity_names
+ or match.group(2) in self.cfg.exclude_entity_names
):
return f"{char}.{match.group(2)}"
if char == "array":
diff --git a/obfuscators/images.py b/obfuscators/images.py
index 869dbea..6eedcaf 100644
--- a/obfuscators/images.py
+++ b/obfuscators/images.py
@@ -17,6 +17,7 @@
import os
import random
from functools import partial
+from itertools import chain
import aiofiles
import aioshutil
@@ -25,7 +26,7 @@
from config import cfg
from models import FileHandler, OBFStrType, pbm
-from utils import TraverseJson, async_mkdirs, async_pil_dump, async_pil_load, gen_obfstr
+from utils import async_mkdirs, async_pil_dump, async_pil_load, gen_obfstr
from . import OBF
@@ -44,70 +45,79 @@ async def async_rename(
self.pngs = pngs
self.tgas = tgas
- for item in renames + obf_names:
- need_obf = item in obf_names
- # If the filenames of the subpack are different from those of the main pack, it will lead to a decrease in resource pack performance.
- (rd := random.Random()).seed(
- item.path if await self._async_check_sub_ref(item, texture_jsons_2 if need_obf else texture_jsons) else item.cut
- )
- name, ext = os.path.splitext(os.path.basename(item.path))
-
- if item in obf_names:
- new_name = gen_obfstr(name, OBFStrType.OBFFILE) + ext
+ for file, need_obf in chain(((x, False) for x in renames), ((x, True) for x in obf_names)):
+ name, ext = os.path.splitext(os.path.basename(file.path))
+ if name in cfg.exclude_image_names:
+ new_dir = os.path.join(self.work_path, os.path.dirname(file.path))
+ new_path = os.path.join(self.work_path, file.path)
+ pbm.revert_t_item()
else:
- # insert string
- len1 = len(self.namespace)
- if (len2 := len(name)) == 1:
- return self.namespace[: (insert_position := rd.randint(0, len1))] + name + self.namespace[insert_position:]
- insert_positions = sorted(rd.sample(range(len1 + len2), len1))
- result, char1_index, char2_index = [], 0, 0
- for i in range(len1 + len2):
- if char1_index < len1 and i == insert_positions[char1_index]:
- result.append(self.namespace[char1_index])
- char1_index += 1
- else:
- result.append(name[char2_index])
- char2_index += 1
- new_name = "".join(result) + ext
-
- new_path = os.path.join(new_dir := os.path.join(self.work_path, os.path.dirname(item.path)), new_name)
- old_path = os.path.join(self.pack_path, item.path)
+ # If the filenames of the subpack are different from those of the main pack, it will lead to a decrease in resource pack performance.
+ (rd := random.Random()).seed(
+ file.path
+ if await self._async_check_sub_ref(file, texture_jsons_2 if need_obf else texture_jsons)
+ else file.cut
+ )
+ if need_obf:
+ new_name = gen_obfstr(name, OBFStrType.OBFFILE) + ext
+ else:
+ # insert string
+ len1 = len(self.namespace)
+ # NOTE(review): the generic insertion below already handles single-char names
+ # (`rd.sample(range(len1 + 1), len1)` is valid); the old `len == 1` early `return` aborted the whole rename pass and dropped `ext`.
+ len2 = len(name)
+ insert_positions = sorted(rd.sample(range(len1 + len2), len1))
+ result, char1_index, char2_index = [], 0, 0
+ for i in range(len1 + len2):
+ if char1_index < len1 and i == insert_positions[char1_index]:
+ result.append(self.namespace[char1_index])
+ char1_index += 1
+ else:
+ result.append(name[char2_index])
+ char2_index += 1
+ new_name = "".join(result) + ext
+ new_path = os.path.join(new_dir := os.path.join(self.work_path, os.path.dirname(file.path)), new_name)
+ OBFStrType.FILENAME.bi_map[file.path.replace("\\", "/")] = os.path.join(
+ os.path.dirname(file.path), new_name
+ ).replace("\\", "/")
+ pbm.update()
+
+ old_path = os.path.join(self.pack_path, file.path)
try:
await async_mkdirs(new_dir)
await aioshutil.copy2(old_path, new_path)
except Exception as e:
print(f"An error occurred while rename image ({new_path}):{e}")
self.logger.exception(e)
- (
+ if ext == ".png":
self.pngs.append(FileHandler(new_path, processed=True))
- if ext == ".png"
- else self.tgas.append(FileHandler(new_path, processed=True))
- )
- OBFStrType.FILENAME.bi_map[item.path.replace("\\", "/")] = os.path.join(
- os.path.dirname(item.path), new_name
- ).replace("\\", "/")
-
- pbm.update(1)
+ else:
+ self.tgas.append(FileHandler(new_path, processed=True))
# rename the JSON files to match the names of the image files.
for j in image_jsons:
- new_dir = os.path.join(self.work_path, (rel_dir := os.path.dirname(j.path)))
- path = os.path.splitext(j.path)[0].replace("\\", "/")
- new_name = os.path.splitext(
- os.path.basename(OBFStrType.FILENAME.bi_map.get(f"{path}.png") or OBFStrType.FILENAME.bi_map.get(f"{path}.tga"))
- )[0]
- new_path = os.path.join(new_dir, f"{new_name}.json")
- old_path = os.path.join(self.pack_path, j.path)
- try:
- await async_mkdirs(new_dir)
- await aioshutil.copy2(old_path, new_path)
- except Exception as e:
- print(f"An error occurred while rename json ({new_path}):{e}")
- self.logger.exception(e)
+ if os.path.splitext(os.path.basename(j.path))[0] in cfg.exclude_image_names:
+ pbm.revert_t_item()
+ else:
+ new_dir = os.path.join(self.work_path, (rel_dir := os.path.dirname(j.path)))
+ path = os.path.splitext(j.path)[0].replace("\\", "/")
+ new_name = os.path.splitext(
+ os.path.basename(
+ OBFStrType.FILENAME.bi_map.get(f"{path}.png") or OBFStrType.FILENAME.bi_map.get(f"{path}.tga")
+ )
+ )[0]
+ new_path = os.path.join(new_dir, f"{new_name}.json")
+ old_path = os.path.join(self.pack_path, j.path)
+ try:
+ await async_mkdirs(new_dir)
+ await aioshutil.copy2(old_path, new_path)
+ except Exception as e:
+ print(f"An error occurred while rename json ({new_path}):{e}")
+ self.logger.exception(e)
- j.path = os.path.join(rel_dir, f"{new_name}.json")
- j.processed = True
- pbm.update(1)
+ j.path = os.path.join(rel_dir, f"{new_name}.json")
+ j.processed = True
+ pbm.update()
# fix jsons
def repl(match: re.Match, subp):
@@ -128,9 +138,7 @@ def repl(match: re.Match, subp):
return data
subp_pattern = re.compile(r"[/\\]?(subpacks[/\\].+?)[/\\]")
- value_pattern = re.compile(
- r'(?<=(? 6, cfg.extrainfo)))
async def _async_check_sub_ref(self, item: FileHandler, jsons: list[FileHandler]):
diff --git a/obfuscators/jsons.py b/obfuscators/jsons.py
index 9168457..7e13fd9 100644
--- a/obfuscators/jsons.py
+++ b/obfuscators/jsons.py
@@ -13,12 +13,12 @@
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
-import itertools
import json
import os
import random
import time
import uuid
+from itertools import chain
from typing import Any
import aiofiles
@@ -36,15 +36,15 @@
class Jsons(OBF):
async def async_obf(self, *args: list[FileHandler]):
self.comment_pattern = re.compile(r'(?<="[^"]*"):(?=\s*[^",\{\[]|".*?[^"]*")')
- update_pbar = lambda: None if is_merged else pbm.update(1)
+ update_pbar = lambda: None if is_merged else pbm.update()
- for j in itertools.chain(*args):
+ for j in chain(*args):
path = os.path.join(self.work_path if j.processed else self.pack_path, j.path)
new_dir = os.path.dirname(new_path := os.path.join(self.work_path, j.path))
is_merged = j.path == cfg.merged_ui_path or "MERGED" in OBFStrType.OBFFILE.bi_map.backward.get(
os.path.splitext(os.path.basename(j.path))[0], ""
)
- if glob.globmatch(j.path, cfg.excluded_jsons, flags=glob.D | glob.G):
+ if glob.globmatch(j.path, cfg.exclude_jsons, flags=glob.D | glob.G):
if not j.processed:
await async_mkdirs(new_dir)
await aioshutil.copy2(path, new_path)
@@ -68,9 +68,9 @@ async def async_obf(self, *args: list[FileHandler]):
if not cfg.unformat:
data = default_dumps(json.loads(data) if isinstance(data, str) else data, indent=2)
if cfg.empty_dict:
- if any(s in str(data) for s in cfg.excluded_entity_names):
+ if any(s in str(data) for s in cfg.exclude_entity_names):
if not is_merged:
- pbm.revert_t_item(1)
+ pbm.revert_t_item()
else:
data = (data if isinstance(data, str) else default_dumps(data)) + "{}"
update_pbar()
@@ -86,15 +86,15 @@ async def async_obf(self, *args: list[FileHandler]):
self.logger.exception(e)
if not is_merged:
- pbm.update_n_file(1)
+ pbm.update_n_file()
pbm.pbar.refresh()
def is_exclude(self, data: str, plus=True):
return (
plus
- and data in cfg.excluded_names
- or data.partition("@")[0] in cfg.excluded_jsonui_names
- or any(".".join(data[-len(s.split(".")) :]) == s for s in cfg.excluded_entity_names)
+ and data in cfg.exclude_names
+ or data.partition("@")[0] in cfg.exclude_jsonui_names
+ or any(".".join(data[-len(s.split(".")) :]) == s for s in cfg.exclude_entity_names)
)
def encode_to_unicode(self, data):
@@ -126,7 +126,7 @@ def process_dict(data: dict):
return new_dict, stop
def process_str(data: str, *_):
- return data if data in cfg.excluded_names else "".join([rf"\u{ord(c):04x}" for c in data])
+ return data if data in cfg.exclude_names else "".join([rf"\u{ord(c):04x}" for c in data])
return TraverseJson(process_dict, str_fun=process_str).traverse(data)
@@ -220,5 +220,5 @@ async def async_manifest(
except Exception as e:
print(f"An error occurred while writing json ({new_path}):{e}")
self.logger.exception(e)
- pbm.update_n_file(1)
- pbm.update(1)
+ pbm.update_n_file()
+ pbm.update()
diff --git a/obfuscators/uis.py b/obfuscators/uis.py
index bba8676..7f3d79f 100644
--- a/obfuscators/uis.py
+++ b/obfuscators/uis.py
@@ -87,10 +87,10 @@ async def async_obf(
async def async_merge(self):
control_split_pattern = re.compile(r"[@\.]")
- excluded_namespace = set()
+ exclude_namespace = set()
def stats_ctrl_name(data: dict, renamed: list):
- if (ns := data.get("namespace")) not in excluded_namespace:
+ if (ns := data.get("namespace")) not in exclude_namespace:
self.uniqueui_namespace.append(ns)
new_dict = {"namespace": self.namespace}
@@ -103,7 +103,7 @@ def stats_ctrl_name(data: dict, renamed: list):
return new_dict, True
def fix_self_namespace_dict(data: dict, is_control: bool, renamed: list):
- if (ns := data.get("namespace")) and ns in excluded_namespace:
+ if (ns := data.get("namespace")) and ns in exclude_namespace:
return {}, True
return {fix_self_namespace_str(k, renamed=renamed) if is_control else k: v for k, v in data.items()}, False
@@ -146,11 +146,11 @@ def fix_all_namespace_str(data: str, *_, namespace: str):
data = self.processed[j.path] = await self.async_get_json_data(j)
ns = (json.loads(comment_pattern.sub("", data)) if isinstance(data, str) else data)["namespace"]
self.uniqueui_namespace.append(ns)
- excluded_namespace.add(ns)
+ exclude_namespace.add(ns)
# start merge
merged_dict = {"namespace": self.namespace}
- excluded_files = set()
+ exclude_files = set()
for j in self.uniqueuis.copy():
renamed_controls = []
data = TraverseJson(partial(stats_ctrl_name, renamed=renamed_controls)).traverse(
@@ -175,11 +175,11 @@ def fix_all_namespace_str(data: str, *_, namespace: str):
self.uniqueuis.remove(j)
pbm.revert_t_item(sum((cfg.comment, cfg.empty_dict, cfg.sort, cfg.unicode, cfg.obfuscate_jsonui)))
- pbm.update_n_file(1)
- pbm.update(1)
+ pbm.update_n_file()
+ pbm.update()
is_exclude = False
else:
- pbm.revert_t_item(1)
+ pbm.revert_t_item()
# TODO: NOT TESTED
# obfuscate the filenames of files that cannot be merged
@@ -195,7 +195,7 @@ def fix_all_namespace_str(data: str, *_, namespace: str):
print(f"An error occurred while write json ({new_path}):{e}")
self.logger.exception(e)
- excluded_files.add(j.cut)
+ exclude_files.add(j.cut)
j.path = os.path.join(rel_dir, new_name)
j.processed = True
@@ -204,7 +204,7 @@ def fix_all_namespace_str(data: str, *_, namespace: str):
# Fix all namespaces for each JsonUI.
for ns in self.uniqueui_namespace:
- if ns not in excluded_namespace:
+ if ns not in exclude_namespace:
instance = TraverseControls(
lambda data, *_: ({fix_all_namespace_str(k, namespace=ns): v for k, v in data.items()}, False),
str_fun=partial(fix_all_namespace_str, namespace=ns),
@@ -241,7 +241,7 @@ def fix_all_namespace_str(data: str, *_, namespace: str):
k: (
# TODO: NOT TESTED
[
- gen_obfstr(i, OBFStrType.OBFFILE) if i in excluded_files else i
+ gen_obfstr(i, OBFStrType.OBFFILE) if i in exclude_files else i
for i in data["ui_defs"]
if os.path.splitext(i)[0] not in unique_cuts
]
@@ -263,7 +263,7 @@ def fix_all_namespace_str(data: str, *_, namespace: str):
self.logger.exception(e)
j.processed = True
- pbm.update(1)
+ pbm.update()
async def async_obf_ctrl_name(self):
def stats_ctrl_dict(data: dict, is_control: bool, is_unique=False):
@@ -388,8 +388,8 @@ def repl(m: re.Match):
data = "{}"
if cfg.obfuscate_jsonui:
data = l10n_pattern.sub(repl, data)
- pbm.update_n_file(1)
- pbm.update(1)
+ pbm.update_n_file()
+ pbm.update()
new_dir = os.path.dirname(new_path := os.path.join(self.work_path, l.path))
try:
await async_mkdirs(new_dir)
@@ -413,8 +413,8 @@ def process_str(data: str, *_):
for j in self.uniqueuis:
self.processed[j.path] = process.traverse(await self.async_get_json_data(j), exclude=False)
j.processed = True
- pbm.update(1)
+ pbm.update()
for j in self.jsonuis:
self.processed[j.path] = process.traverse(await self.async_get_json_data(j), exclude=False)
j.processed = True
- pbm.update(1)
+ pbm.update()
diff --git a/utils/obfuscator.py b/utils/obfuscator.py
index cb1e56f..55606bb 100644
--- a/utils/obfuscator.py
+++ b/utils/obfuscator.py
@@ -133,7 +133,7 @@ def process_dict(self, data: dict[str, Any], *args):
if ns := data.get("namespace"):
new_dict["namespace"] = ns
for k, v in data.items():
- if self.exclude and k.partition("@")[0] in self.cfg.excluded_jsonui_names:
+ if self.exclude and k.partition("@")[0] in self.cfg.exclude_jsonui_names:
new_dict[k] = v
if k in NOT_CONTROL_KEYS:
is_control = False
@@ -157,7 +157,7 @@ def process_list(self, data: list, *args):
def process_str(self, data: str, *args):
from config import cfg
- return data if self.exclude and data.partition("@")[-1] in cfg.excluded_jsonui_names else self.str_fun(data, *args)
+ return data if self.exclude and data.partition("@")[-1] in cfg.exclude_jsonui_names else self.str_fun(data, *args)
ENUM_LINKS = (