# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def get_stock_actorinfo() -> oead.byml.Hash:
    """Load the unmodified ActorInfo.product.sbyml from the game dump as a BYML hash."""
    raw = util.get_game_file("Actor/ActorInfo.product.sbyml").read_bytes()
    # The stock file is Yaz0-compressed; decompress before parsing.
    return oead.byml.from_binary(util.decompress(raw))
def get_stock_effects() -> oead.byml.Hash:
    """Load the unmodified status effect list from the game's Bootup.pack."""
    bootup = oead.Sarc(util.get_game_file("Pack/Bootup.pack").read_bytes())
    effect_data = bootup.get_file("Ecosystem/StatusEffectList.sbyml").data
    # The parsed document wraps the effect list; index 0 holds the hash we want.
    return oead.byml.from_binary(util.decompress(effect_data))[0]
Extracts the reference MSYT texts for the given language to a temp dir
:param lang: The game language to use, defaults to USen.
:type lang: str, optional
:param for_merge: Whether the output is to be merged (or as reference), defaults to False
:type for_merge: bool
:param tmp_dir: The temp directory to extract to, defaults to "tmp_text" in BCML's working
directory.
:type tmp_dir: class:`pathlib.Path`, optional
"""
# NOTE(review): fragment — the enclosing `def` line sits above this chunk; the
# body below assumes `lang`, `for_merge`, and `tmp_dir` are its parameters, per
# the docstring. Confirm against the full file.
# Start from a clean temp directory so stale files from a previous run
# cannot leak into the extraction.
if tmp_dir.exists():
    shutil.rmtree(tmp_dir, ignore_errors=True)
with util.get_game_file(f'Pack/Bootup_{lang}.pack').open('rb') as b_file:
    bootup_pack = sarc.read_file_and_make_sarc(b_file)
# The language pack nests a compressed message SARC; decompress and reopen it.
msg_bytes = util.decompress(
    bootup_pack.get_file_data(f'Message/Msg_{lang}.product.ssarc').tobytes()
)
msg_pack = sarc.SARC(msg_bytes)
# Reference copies go to 'ref'; merge targets go to 'merged'.
if not for_merge:
    merge_dir = tmp_dir / 'ref'
else:
    merge_dir = tmp_dir / 'merged'
msg_pack.extract_to_dir(str(merge_dir))
# Convert the extracted binary MSBTs to editable MSYT text files.
msbt_to_msyt(merge_dir)
def get_stock_quests() -> oead.byml.Array:
    """Load the unmodified quest list from the game's TitleBG.pack."""
    title_bg = oead.Sarc(util.get_game_file("Pack/TitleBG.pack").read_bytes())
    quest_data = title_bg.get_file("Quest/QuestProduct.sbquestpack").data
    return oead.byml.from_binary(util.decompress(quest_data))
def threaded_merge(item, verbose: bool) -> (str, dict):
    """Deep merges an individual file, suitable for multiprocessing"""
    # item is a (file, patches) pair: the canonical game-relative path and
    # the deep-merge patch data collected for it.
    file, stuff = item
    failures = {}
    # Files whose path starts with 'aoc' live in the DLC tree; the second
    # argument to get_game_file selects the DLC lookup.
    base_file = util.get_game_file(file, file.startswith('aoc'))
    # Prefer the master modpack's already-merged copy over the stock file.
    if (util.get_master_modpack_dir() / file).exists():
        base_file = util.get_master_modpack_dir() / file
    file_ext = os.path.splitext(file)[1]
    # NOTE(review): this check is redundant — base_file was already switched to
    # the master modpack copy above whenever that copy exists.
    if file_ext in util.SARC_EXTS and (util.get_master_modpack_dir() / file).exists():
        base_file = (util.get_master_modpack_dir() / file)
    file_bytes = base_file.read_bytes()
    # A 'Yaz0' magic number marks a compressed file.
    yazd = file_bytes[0:4] == b'Yaz0'
    file_bytes = file_bytes if not yazd else util.decompress(file_bytes)
    magic = file_bytes[0:4]
    if magic == b'SARC':
        # Archives are patched recursively, file by file.
        new_sarc, sub_failures = nested_patch(sarc.SARC(file_bytes), stuff)
        del file_bytes  # release the raw buffer before re-serializing
        new_bytes = new_sarc.get_bytes()
        for failure, contents in sub_failures.items():
            print(f'Some patches to {failure} failed to apply.')
            failures[failure] = contents
    else:
        try:
            if magic == b'AAMP':
                # Parameter archives: apply each AAMP patch in sequence.
                aamp_contents = aamp.Reader(file_bytes).parse()
                for change in stuff:
                    aamp_contents = _aamp_merge(aamp_contents, change)
                aamp_bytes = aamp.Writer(aamp_contents).get_bytes()
            pass
            # NOTE(review): fragment — the `except` clause and the remainder of
            # this function are not visible in this chunk.
# NOTE(review): fragment — the enclosing function (a map-unit loader) is cut
# off above; `map_bytes`, `map_unit`, `aoc_dir`, and `tmp_dir` are defined
# earlier in it. Confirm against the full file.
if not map_bytes:
    # Fall back to the mod's own copy of the map unit: DLC tree first, then
    # the base-game content tree.
    # NOTE(review): the existence check uses `aoc_dir` but the read uses
    # tmp_dir/'aoc'/'0010' — presumably these are the same directory; verify.
    if (aoc_dir / 'Map' / 'MainField' / map_unit.section /
            f'{map_unit.section}_{map_unit.type}.smubin').exists():
        map_bytes = (tmp_dir / 'aoc' / '0010' / 'Map' / 'MainField' / map_unit.section /
                     f'{map_unit.section}_{map_unit.type}.smubin').read_bytes()
    elif (tmp_dir / 'content' / 'Map' / 'MainField' / map_unit.section /
            f'{map_unit.section}_{map_unit.type}.smubin').exists():
        map_bytes = (tmp_dir / 'content' / 'Map' / 'MainField' / map_unit.section /
                     f'{map_unit.section}_{map_unit.type}.smubin').read_bytes()
if not map_bytes:
    # No candidate file found anywhere — the merge cannot proceed for this unit.
    raise FileNotFoundError(
        f'Oddly, the modded map {map_unit.section}_{map_unit.type}.smubin '
        'could not be found.'
    )
# The .smubin is Yaz0-compressed BYML; decompress and parse it.
map_bytes = util.decompress(map_bytes)
return byml.Byml(map_bytes).parse()
# NOTE(review): fragment — the `def get_msbt_hashes(lang...)` header is above
# this chunk. The function attribute below memoizes one hash table per language
# across calls.
if not hasattr(get_msbt_hashes, 'texthashes'):
    get_msbt_hashes.texthashes = {}
if lang not in get_msbt_hashes.texthashes:
    hash_table = util.get_exec_dir() / 'data' / 'msyt' / \
        f'Msg_{lang}_hashes.csv'
    if hash_table.exists():
        # Preferred source: a shipped CSV of precomputed hashes,
        # one (msbt path, hash) row per file.
        get_msbt_hashes.texthashes[lang] = {}
        with hash_table.open('r') as h_file:
            csv_loop = csv.reader(h_file)
            for row in csv_loop:
                get_msbt_hashes.texthashes[lang][row[0]] = row[1]
    elif util.get_game_file(f'Pack/Bootup_{lang}.pack').exists():
        # Fallback: hash every MSBT in the game's language pack directly.
        get_msbt_hashes.texthashes[lang] = {}
        with util.get_game_file(f'Pack/Bootup_{lang}.pack').open('rb') as b_file:
            bootup_pack = sarc.read_file_and_make_sarc(b_file)
        # The message archive is nested and compressed inside the bootup pack.
        msg_bytes = util.decompress(
            bootup_pack.get_file_data(f'Message/Msg_{lang}.product.ssarc').tobytes())
        msg_pack = sarc.SARC(msg_bytes)
        for msbt in msg_pack.list_files():
            # xxh32 hex digests match the format used in the CSV tables.
            get_msbt_hashes.texthashes[lang][msbt] = xxhash.xxh32(
                msg_pack.get_file_data(msbt)).hexdigest()
return get_msbt_hashes.texthashes[lang]
def threaded_merge(item) -> Tuple[str, dict]:
    """Deep merges an individual file, suitable for multiprocessing"""
    # item is a (file, patches) pair produced by the merge driver.
    file, stuff = item
    failures = {}
    # Files under the platform-specific DLC path prefix come from the DLC dump.
    base_file = util.get_game_file(file, file.startswith(util.get_dlc_path()))
    # Prefer the master modpack's already-merged copy over the stock file.
    if (util.get_master_modpack_dir() / file).exists():
        base_file = util.get_master_modpack_dir() / file
    file_ext = os.path.splitext(file)[1]
    # NOTE(review): redundant — base_file was already switched to the master
    # modpack copy above whenever that copy exists.
    if file_ext in util.SARC_EXTS and (util.get_master_modpack_dir() / file).exists():
        base_file = util.get_master_modpack_dir() / file
    file_bytes = base_file.read_bytes()
    # A 'Yaz0' magic number marks compression; decompress before inspecting.
    yazd = file_bytes[0:4] == b"Yaz0"
    file_bytes = file_bytes if not yazd else util.decompress(file_bytes)
    magic = file_bytes[0:4]
    if magic == b"SARC":
        # Archives are patched recursively, file by file.
        new_sarc, sub_failures = nested_patch(oead.Sarc(file_bytes), stuff)
        del file_bytes  # release the raw buffer before re-serializing
        # NOTE(review): write() presumably returns (alignment, data) — index 1
        # is the serialized payload; confirm against the oead API in use.
        new_bytes = bytes(new_sarc.write()[1])
        for failure, contents in sub_failures.items():
            print(f"Some patches to {failure} failed to apply.")
            failures[failure] = contents
    elif magic == b"AAMP":
        try:
            aamp_contents = ParameterIO.from_binary(file_bytes)
            try:
                aamp_contents = shop_merge(
                    aamp_contents,
                    file_ext.replace(".", ""),
                    # NOTE(review): chunk ends mid-call — the remainder of this
                    # function is not visible here.