
Simplify log indentation management.

Previously, the indentation level was hardcoded everywhere. This was
tedious: changing the log structure meant updating many manual
indentation values. Now that objects can be trivially generated at
export time, the export code might be much more deeply nested than
before, so it's better to let indentation be more implicit. This,
therefore, adds a context manager that increases the indentation inside
`with` blocks. Manual indentation specification remains supported where
an explicit level is still required by existing call sites.
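
For illustration only, a minimal sketch of the idea follows, using a hypothetical `IndentedLogger` rather than Korman's actual `_ExportLogger`: the logger tracks its current indentation level, exposes an `indent()` context manager that bumps the level inside `with` blocks, and still honors an explicit `indent=` argument for older call sites.

```python
from contextlib import contextmanager

class IndentedLogger:
    """Minimal sketch: indentation-aware logging driven by a context manager."""

    def __init__(self):
        self._indent_level = 0

    @contextmanager
    def indent(self):
        # Each nested `with log.indent():` block raises the level by one and
        # restores it on exit, even if an exception escapes the block.
        self._indent_level += 1
        try:
            yield
        finally:
            self._indent_level -= 1

    def msg(self, text, *args, indent=None):
        # An explicit indent= still overrides the implicit level, so legacy
        # call sites keep working while new code relies on `with` blocks.
        level = self._indent_level if indent is None else indent
        if args:
            text = text.format(*args)
        print("    " * level + text)

# Usage: nesting `with` blocks replaces hard-coded indent= values.
log = IndentedLogger()
log.msg("[SceneObject '{}']", "Teapot")
with log.indent():
    log.msg("Exporting '{}' modifier", "Sound Emitter")
    with log.indent():
        log.msg("Stashing '{}' for conversion", "teapot_diffuse.png")
```

Because the context manager restores the previous level on exit, nested `with` blocks compose naturally, which is what keeps the deeper nesting from generated objects manageable.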
pull/369/head
Adam Johnson (1 year ago)
parent commit e5eba455f3
Signed by: Hoikas (GPG Key ID: 0B6515D6FF6F271E)
  1. 17
      korman/exporter/animation.py
  2. 2
      korman/exporter/camera.py
  3. 157
      korman/exporter/convert.py
  4. 2
      korman/exporter/decal.py
  5. 65
      korman/exporter/etlight.py
  6. 2
      korman/exporter/image.py
  7. 57
      korman/exporter/locman.py
  8. 57
      korman/exporter/logger.py
  9. 497
      korman/exporter/material.py
  10. 31
      korman/exporter/mesh.py
  11. 8
      korman/exporter/outfile.py
  12. 6
      korman/exporter/physics.py
  13. 8
      korman/exporter/python.py
  14. 106
      korman/exporter/rtlight.py
  15. 10
      korman/korlib/python.py
  16. 4
      korman/korlib/texture.py
  17. 4
      korman/nodes/node_avatar.py
  18. 11
      korman/nodes/node_core.py
  19. 13
      korman/nodes/node_python.py
  20. 24
      korman/operators/op_image.py
  21. 14
      korman/properties/modifiers/anim.py
  22. 8
      korman/properties/modifiers/gui.py
  23. 6
      korman/properties/modifiers/physics.py
  24. 3
      korman/properties/modifiers/region.py
  25. 17
      korman/properties/modifiers/render.py
  26. 6
      korman/properties/modifiers/sound.py
  27. 2
      korman/properties/modifiers/water.py

17
korman/exporter/animation.py

@ -158,10 +158,10 @@ class AnimationConverter:
if energy_curve is None and color_curves is None:
return None
elif lamp.use_only_shadow:
self._exporter().report.warn("Cannot animate Lamp color because this lamp only casts shadows", indent=3)
self._exporter().report.warn("Cannot animate Lamp color because this lamp only casts shadows")
return None
elif not lamp.use_specular and not lamp.use_diffuse:
self._exporter().report.warn("Cannot animate Lamp color because neither Diffuse nor Specular are enabled", indent=3)
self._exporter().report.warn("Cannot animate Lamp color because neither Diffuse nor Specular are enabled")
return None
# OK Specular is easy. We just toss out the color as a point3.
@ -251,7 +251,7 @@ class AnimationConverter:
yield applicator
elif falloff == "INVERSE_SQUARE":
if self._mgr.getVer() >= pvMoul:
report.port("Lamp {} Falloff animations are only supported in Myst Online: Uru Live", falloff, indent=3)
report.port(f"Lamp {falloff} Falloff animations are only supported in Myst Online: Uru Live")
keyframes = self._process_fcurves(omni_fcurves, omni_channels, 1, convert_omni_atten,
omni_defaults, start=start, end=end)
if keyframes:
@ -262,7 +262,7 @@ class AnimationConverter:
applicator.channel = channel
yield applicator
else:
report.warn("Lamp {} Falloff animations are not supported for this version of Plasma", falloff, indent=3)
report.warn(f"Lamp {falloff} Falloff animations are not supported for this version of Plasma")
else:
report.warn("Lamp Falloff '{}' animations are not supported", falloff, ident=3)
@ -294,8 +294,7 @@ class AnimationConverter:
applicator.channel = channel
yield applicator
else:
self._exporter().report.warn("[{}]: Volume animation evaluated to zero keyframes!",
sound.sound.name, indent=2)
self._exporter().report.warn(f"[{sound.sound.name}]: Volume animation evaluated to zero keyframes!")
break
def _convert_spot_lamp_animation(self, name, fcurves, lamp, start, end):
@ -341,8 +340,10 @@ class AnimationConverter:
def _convert_transform_animation(self, bo, fcurves, default_xform, adjust_xform, *, allow_empty: Optional[bool] = False,
start: Optional[int] = None, end: Optional[int] = None) -> Optional[plMatrixChannelApplicator]:
if adjust_xform != mathutils.Matrix.Identity(4):
self._exporter().report.warn(("{}: Transform animation is not local and may export incorrectly. " +
"Please use Alt-P -> Clear Parent Inverse before animating objects to avoid issues.").format(bo.name), indent=1)
self._exporter().report.warn(
f"'{bo.name}': Transform animation is not local and may export incorrectly. "
"Please use Alt-P -> Clear Parent Inverse before animating objects to avoid issues."
)
else:
# Adjustment matrix is identity, just pass None instead...
adjust_xform = None

2
korman/exporter/camera.py

@ -123,7 +123,7 @@ class CameraConverter:
elif props.poa_type == "object":
brain.poaObject = self._mgr.find_create_key(plSceneObject, bl=props.poa_object)
else:
self._report.warn("Circle Camera '{}' has no Point of Attention. Is this intended?", bo.name, indent=3)
self._report.warn(f"Circle Camera '{bo.name}' has no Point of Attention. Is this intended?")
if props.circle_pos == "farthest":
brain.circleFlags |= plCameraBrain1_Circle.kFarthest

157
korman/exporter/convert.py

@ -219,12 +219,13 @@ class Exporter:
inc_progress = self.report.progress_increment
self.report.msg("\nEnsuring Age is sane...")
for bl_obj in self._objects:
for mod in bl_obj.plasma_modifiers.modifiers:
fn = getattr(mod, "sanity_check", None)
if fn is not None:
fn()
inc_progress()
with self.report.indent():
for bl_obj in self._objects:
for mod in bl_obj.plasma_modifiers.modifiers:
fn = getattr(mod, "sanity_check", None)
if fn is not None:
fn()
inc_progress()
self.report.msg("... Age is grinning and holding a spatula. Must be OK, then.")
def _export_age_info(self):
@ -254,7 +255,7 @@ class Exporter:
parent = bo.parent
if parent is not None:
if parent.plasma_object.enabled:
self.report.msg("Attaching to parent SceneObject '{}'", parent.name, indent=1)
self.report.msg(f"Attaching to parent SceneObject '{parent.name}'")
parent_ci = self._export_coordinate_interface(None, parent)
parent_ci.addChild(so.key)
else:
@ -285,42 +286,47 @@ class Exporter:
self.report.msg("\nExporting localization...")
for bl_obj in self._objects:
for mod in filter(lambda x: hasattr(x, "export_localization"), bl_obj.plasma_modifiers.modifiers):
mod.export_localization(self)
inc_progress()
with self.report.indent():
for bl_obj in self._objects:
for mod in filter(lambda x: hasattr(x, "export_localization"), bl_obj.plasma_modifiers.modifiers):
mod.export_localization(self)
inc_progress()
def _export_scene_objects(self):
self.report.progress_advance()
self.report.progress_range = len(self._objects)
inc_progress = self.report.progress_increment
log_msg = self.report.msg
indent = self.report.indent
for bl_obj in self._objects:
log_msg("\n[SceneObject '{}']".format(bl_obj.name))
# First pass: do things specific to this object type.
# note the function calls: to export a MESH, it's _export_mesh_blobj
export_fn = "_export_{}_blobj".format(bl_obj.type.lower())
try:
export_fn = getattr(self, export_fn)
except AttributeError:
self.report.warn("""'{}' is a Plasma Object of Blender type '{}'
... And I have NO IDEA what to do with that! Tossing.""".format(bl_obj.name, bl_obj.type))
continue
log_msg("Blender Object '{}' of type '{}'".format(bl_obj.name, bl_obj.type), indent=1)
# Create a sceneobject if one does not exist.
# Before we call the export_fn, we need to determine if this object is an actor of any
# sort, and barf out a CI.
sceneobject = self.mgr.find_create_object(plSceneObject, bl=bl_obj)
self._export_actor(sceneobject, bl_obj)
export_fn(sceneobject, bl_obj)
# And now we puke out the modifiers...
for mod in bl_obj.plasma_modifiers.modifiers:
log_msg("Exporting '{}' modifier".format(mod.bl_label), indent=1)
mod.export(self, bl_obj, sceneobject)
log_msg(f"\n[SceneObject '{bl_obj.name}']")
with indent():
# First pass: do things specific to this object type.
# note the function calls: to export a MESH, it's _export_mesh_blobj
export_fn = "_export_{}_blobj".format(bl_obj.type.lower())
try:
export_fn = getattr(self, export_fn)
except AttributeError:
self.report.warn("""'{}' is a Plasma Object of Blender type '{}'
... And I have NO IDEA what to do with that! Tossing.""".format(bl_obj.name, bl_obj.type))
continue
log_msg(f"Blender Object '{bl_obj.name}' of type '{bl_obj.type}'")
# Create a sceneobject if one does not exist.
# Before we call the export_fn, we need to determine if this object is an actor of any
# sort, and barf out a CI.
sceneobject = self.mgr.find_create_object(plSceneObject, bl=bl_obj)
self._export_actor(sceneobject, bl_obj)
with indent():
export_fn(sceneobject, bl_obj)
# And now we puke out the modifiers...
for mod in bl_obj.plasma_modifiers.modifiers:
log_msg(f"Exporting '{mod.bl_label}' modifier")
with indent():
mod.export(self, bl_obj, sceneobject)
inc_progress()
def _export_camera_blobj(self, so, bo):
@ -338,27 +344,31 @@ class Exporter:
if bo.data.materials:
self.mesh.export_object(bo, so)
else:
self.report.msg("No material(s) on the ObData, so no drawables", indent=1)
self.report.msg("No material(s) on the ObData, so no drawables")
def _export_font_blobj(self, so, bo):
with utils.temporary_mesh_object(bo) as meshObj:
if bo.data.materials:
self.mesh.export_object(meshObj, so)
else:
self.report.msg("No material(s) on the ObData, so no drawables", indent=1)
self.report.msg("No material(s) on the ObData, so no drawables")
def _export_referenced_node_trees(self):
self.report.progress_advance()
self.report.progress_range = len(self.want_node_trees)
inc_progress = self.report.progress_increment
self.report.msg("\nChecking Logic Trees...")
for tree_name, references in self.want_node_trees.items():
self.report.msg("NodeTree '{}'", tree_name, indent=1)
tree = bpy.data.node_groups[tree_name]
for bo, so in references:
tree.export(self, bo, so)
inc_progress()
log_msg = self.report.msg
indent = self.report.indent
log_msg("\nChecking Logic Trees...")
with indent():
for tree_name, references in self.want_node_trees.items():
log_msg(f"NodeTree '{tree_name}'")
with indent():
tree = bpy.data.node_groups[tree_name]
for bo, so in references:
tree.export(self, bo, so)
inc_progress()
def _harvest_actors(self):
self.report.progress_advance()
@ -401,8 +411,10 @@ class Exporter:
self.report.progress_advance()
self.report.progress_range = len(self._objects)
inc_progress = self.report.progress_increment
self.report.msg("\nPost-Processing SceneObjects...")
log_msg = self.report.msg
indent = self.report.indent
log_msg("\nPost-Processing SceneObjects...")
mat_mgr = self.mesh.material
for bl_obj in self._objects:
sceneobject = self.mgr.find_object(plSceneObject, bl=bl_obj)
@ -421,18 +433,22 @@ class Exporter:
net.propagate_synch_options(sceneobject, layer)
# Modifiers don't have to expose post-processing, but if they do, run it
for mod in bl_obj.plasma_modifiers.modifiers:
proc = getattr(mod, "post_export", None)
if proc is not None:
self.report.msg("Post processing '{}' modifier '{}'", bl_obj.name, mod.bl_label, indent=1)
proc(self, bl_obj, sceneobject)
with indent():
for mod in bl_obj.plasma_modifiers.modifiers:
proc = getattr(mod, "post_export", None)
if proc is not None:
self.report.msg(f"Post processing '{bl_obj.name}' modifier '{mod.bl_label}'")
with indent():
proc(self, bl_obj, sceneobject)
inc_progress()
def _pre_export_scene_objects(self):
self.report.progress_advance()
self.report.progress_range = len(self._objects)
inc_progress = self.report.progress_increment
self.report.msg("\nGenerating export dependency objects...")
log_msg = self.report.msg
indent = self.report.indent
log_msg("\nGenerating export dependency objects...")
# New objects may be generate during this process; they will be appended at the end.
new_objects = []
@ -450,8 +466,10 @@ class Exporter:
@handle_temporary.register(bpy.types.Object)
def _(temporary, parent):
self.exit_stack.enter_context(TemporaryObject(temporary, bpy.data.objects.remove))
self.report.msg("'{}': generated Object '{}' (Plasma Object: {})", parent.name,
temporary.name, temporary.plasma_object.enabled, indent=1)
log_msg(
f"'{parent.name}': generated Object '{temporary.name}' "
f"(Plasma Object: {temporary.plasma_object.enabled})",
)
if temporary.plasma_object.enabled:
new_objects.append(temporary)
@ -461,14 +479,15 @@ class Exporter:
temporary.plasma_object.page = parent.plasma_object.page
# Wow, recursively generated objects. Aren't you special?
for mod in temporary.plasma_modifiers.modifiers:
mod.sanity_check()
do_pre_export(temporary)
with indent():
for mod in temporary.plasma_modifiers.modifiers:
mod.sanity_check()
do_pre_export(temporary)
@handle_temporary.register(bpy.types.NodeTree)
def _(temporary, parent):
self.exit_stack.enter_context(TemporaryObject(temporary, bpy.data.node_groups.remove))
self.report.msg("'{}' generated NodeTree '{}'", parent.name, temporary.name)
log_msg(f"'{parent.name}' generated NodeTree '{temporary.name}'")
if temporary.bl_idname == "PlasmaNodeTree":
parent_so = self.mgr.find_create_object(plSceneObject, bl=parent)
self.want_node_trees[temporary.name].add((parent, parent_so))
@ -482,11 +501,12 @@ class Exporter:
for i in filter(None, result):
handle_temporary(i, bo)
for bl_obj in self._objects:
do_pre_export(bl_obj)
inc_progress()
with indent():
for bl_obj in self._objects:
do_pre_export(bl_obj)
inc_progress()
self.report.msg("... {} new object(s) were generated!", len(new_objects))
log_msg(f"... {len(new_objects)} new object(s) were generated!")
self._objects += new_objects
def _pack_ancillary_python(self):
@ -506,12 +526,13 @@ class Exporter:
# If something bad happens in the final flush, it would be a shame to
# simply toss away the potentially freshly regenerated texture cache.
try:
self.locman.save()
self.mgr.save_age()
self.output.save()
finally:
self.image.save()
with self.report.indent():
try:
self.locman.save()
self.mgr.save_age()
self.output.save()
finally:
self.image.save()
@property
def age_name(self):

2
korman/exporter/decal.py

@ -112,7 +112,7 @@ class DecalConverter:
name = "{}_{}".format(decal_name, bo.name) if is_waveset else decal_name
decal_mgr = exporter.mgr.find_object(pClass, bl=bo, name=name)
if decal_mgr is None:
self._report.msg("Exporing decal manager '{}' to '{}'", decal_name, name, indent=2)
self._report.msg(f"Exporing decal manager '{decal_name}' to '{name}'")
decal_mgr = exporter.mgr.add_object(pClass, bl=bo, name=name)
self._decal_managers[decal_name].append(decal_mgr.key)

65
korman/exporter/etlight.py

@ -112,7 +112,7 @@ class LightBaker:
self._report.msg("\nBaking Static Lighting...")
with GoodNeighbor() as toggle:
with GoodNeighbor() as toggle, self._report.indent():
try:
# reduce the amount of indentation
bake = self._harvest_bakable_objects(objs, toggle)
@ -143,26 +143,25 @@ class LightBaker:
# Step 1: Prepare... Apply UVs, etc, etc, etc
self._report.progress_advance()
self._report.progress_range = len(bake)
self._report.msg("Preparing to bake...", indent=1)
for key, value in bake.items():
if key[0] == "lightmap":
for i in range(len(value)-1, -1, -1):
obj = value[i]
if not self._prep_for_lightmap(obj, toggle):
self._report.msg("Lightmap '{}' will not be baked -- no applicable lights",
obj.name, indent=2)
value.pop(i)
elif key[0] == "vcol":
for i in range(len(value)-1, -1, -1):
obj = value[i]
if not self._prep_for_vcols(obj, toggle):
if self._has_valid_material(obj):
self._report.msg("VCols '{}' will not be baked -- no applicable lights",
obj.name, indent=2)
value.pop(i)
else:
raise RuntimeError(key[0])
inc_progress()
self._report.msg("Preparing to bake...")
with self._report.indent():
for key, value in bake.items():
if key[0] == "lightmap":
for i in range(len(value)-1, -1, -1):
obj = value[i]
if not self._prep_for_lightmap(obj, toggle):
self._report.msg(f"Lightmap '{obj.name}' will not be baked -- no applicable lights")
value.pop(i)
elif key[0] == "vcol":
for i in range(len(value)-1, -1, -1):
obj = value[i]
if not self._prep_for_vcols(obj, toggle):
if self._has_valid_material(obj):
self._report.msg(f"VCols '{obj.name}' will not be baked -- no applicable lights")
value.pop(i)
else:
raise RuntimeError(key[0])
inc_progress()
self._report.msg(" ...")
# Step 2: BAKE!
@ -172,14 +171,15 @@ class LightBaker:
if value:
if key[0] == "lightmap":
num_objs = len(value)
self._report.msg("{} Lightmap(s) [H:{:X}]", num_objs, hash(key[1:]), indent=1)
self._report.msg("{} Lightmap(s) [H:{:X}]", num_objs, hash(key[1:]))
if largest_pass > 1 and num_objs < round(largest_pass * 0.02):
pass_names = set((i.plasma_modifiers.lightmap.bake_pass_name for i in value))
pass_msg = ", ".join(pass_names)
self._report.warn("Small lightmap bake pass! Bake Pass(es): {}".format(pass_msg), indent=2)
with self._report.indent():
self._report.warn(f"Small lightmap bake pass! Bake Pass(es): {pass_msg}")
self._bake_lightmaps(value, key[1:])
elif key[0] == "vcol":
self._report.msg("{} Vertex Color(s) [H:{:X}]", len(value), hash(key[1:]), indent=1)
self._report.msg("{} Vertex Color(s) [H:{:X}]", len(value), hash(key[1:]))
self._bake_vcols(value, key[1:])
self._fix_vertex_colors(value)
else:
@ -327,9 +327,9 @@ class LightBaker:
if mod.image is not None:
uv_texture_names = frozenset((i.name for i in obj.data.uv_textures))
if self.lightmap_uvtex_name in uv_texture_names:
self._report.msg("'{}': Skipping due to valid lightmap override", obj.name, indent=1)
self._report.msg("'{}': Skipping due to valid lightmap override", obj.name)
else:
self._report.warn("'{}': Have lightmap, but regenerating UVs", obj.name, indent=1)
self._report.warn("'{}': Have lightmap, but regenerating UVs", obj.name)
self._prep_for_lightmap_uvs(obj, mod.image, toggle)
return False
return True
@ -341,12 +341,12 @@ class LightBaker:
vcol_layer_names = frozenset((vcol_layer.name.lower() for vcol_layer in obj.data.vertex_colors))
manual_layer_names = _VERTEX_COLOR_LAYERS & vcol_layer_names
if manual_layer_names:
self._report.msg("'{}': Skipping due to valid manual vertex color layer(s): '{}'", obj.name, manual_layer_names.pop(), indent=1)
self._report.msg("'{}': Skipping due to valid manual vertex color layer(s): '{}'", obj.name, manual_layer_names.pop())
return False
if self.force:
return True
if self.vcol_layer_name.lower() in vcol_layer_names:
self._report.msg("'{}': Skipping due to valid matching vertex color layer(s): '{}'", obj.name, self.vcol_layer_name, indent=1)
self._report.msg("'{}': Skipping due to valid matching vertex color layer(s): '{}'", obj.name, self.vcol_layer_name)
return False
return True
@ -377,9 +377,9 @@ class LightBaker:
key = (method,) + lm_layers
bake_pass = bake.setdefault(key, [])
bake_pass.append(i)
self._report.msg("'{}': Bake to {}", i.name, method, indent=1)
self._report.msg("'{}': Bake to {}", i.name, method)
elif mods.lighting.preshade and vcol_bake_required(i):
self._report.msg("'{}': Bake to vcol (crappy)", i.name, indent=1)
self._report.msg("'{}': Bake to vcol (crappy)", i.name)
bake_vcol.append(i)
return bake
@ -435,7 +435,8 @@ class LightBaker:
im = data_images.new(im_name, width=size, height=size)
self._lightmap_images[bo.name] = im
self._prep_for_lightmap_uvs(bo, im, toggle)
with self._report.indent():
self._prep_for_lightmap_uvs(bo, im, toggle)
# Now, set the new LIGHTMAPGEN uv layer as what we want to render to...
# NOTE that this will need to be reset by us to what the user had previously
@ -492,7 +493,7 @@ class LightBaker:
if self._mesh.is_collapsed(bo):
# Danger: uv_base.name -> UnicodeDecodeError (wtf? another blender bug?)
self._report.warn("'{}': packing islands in UV Texture '{}' due to modifier collapse",
bo.name, modifier.uv_map, indent=2)
bo.name, modifier.uv_map)
with self._set_mode("EDIT"):
bpy.ops.mesh.select_all(action="SELECT")
bpy.ops.uv.select_all(action="SELECT")

2
korman/exporter/image.py

@ -160,7 +160,7 @@ class ImageCache:
try:
cached_image.image_data = tuple(self._read_image_data(cached_image, self._read_stream))
except AssertionError:
self._report.warn("Cached copy of '{}' is corrupt and will be discarded", cached_image.name, indent=2)
self._report.warn(f"Cached copy of '{cached_image.name}' is corrupt and will be discarded")
self._images.pop(key)
return None
return cached_image

57
korman/exporter/locman.py

@ -69,12 +69,12 @@ class LocalizationConverter:
self._version = kwargs.get("version")
self._strings = defaultdict(lambda: defaultdict(dict))
def add_string(self, set_name, element_name, language, value, indent=0):
self._report.msg("Accepted '{}' translation for '{}'.", element_name, language, indent=indent)
def add_string(self, set_name, element_name, language, value):
self._report.msg("Accepted '{}' translation for '{}'.", element_name, language)
if isinstance(value, bpy.types.Text):
if value.is_modified:
self._report.warn("'{}' translation for '{}' is modified on the disk but not reloaded in Blender.",
element_name, language, indent=indent)
element_name, language)
value = value.as_string()
for dc in _DUMB_CHARACTERS:
@ -86,7 +86,7 @@ class LocalizationConverter:
if value != old_value:
self._report.warn(
"'{}' translation for '{}' has an illegal {}, which was replaced with: {}",
element_name, language, dc.desc, dc.sub, indent=indent
element_name, language, dc.desc, dc.sub
)
self._strings[set_name][element_name][language] = value
@ -116,7 +116,7 @@ class LocalizationConverter:
stream.write(contents.encode("windows-1252"))
except UnicodeEncodeError:
self._report.warn("Translation '{}': Contents contains characters that cannot be used in this version of Plasma. They will appear as a '?' in game.",
language, indent=2)
language)
# Yes, there are illegal characters... As a stopgap, we will export the file with
# replacement characters ("?") just so it'll work dammit.
@ -125,28 +125,30 @@ class LocalizationConverter:
locs = itertools.chain(self._strings["Journals"].items(), self._strings["DynaTexts"].items())
for journal_name, translations in locs:
self._report.msg("Copying localization '{}'", journal_name, indent=1)
for language_name, value in translations.items():
if language_name not in _SP_LANGUAGES:
self._report.warn("Translation '{}' will not be used because it is not supported in this version of Plasma.",
language_name, indent=2)
continue
suffix = "_{}".format(language_name.lower()) if language_name != "English" else ""
file_name = "{}--{}{}.txt".format(age_name, journal_name, suffix)
write_text_file(language_name, file_name, value)
self._report.msg(f"Copying localization '{journal_name}'")
with self._report.indent():
for language_name, value in translations.items():
if language_name not in _SP_LANGUAGES:
self._report.warn("Translation '{}' will not be used because it is not supported in this version of Plasma.",
language_name)
continue
suffix = "_{}".format(language_name.lower()) if language_name != "English" else ""
file_name = "{}--{}{}.txt".format(age_name, journal_name, suffix)
write_text_file(language_name, file_name, value)
# Ensure that default (read: "English") journal is available
if "English" not in translations:
language_name, value = next(((language_name, value) for language_name, value in translations.items()
if language_name in _SP_LANGUAGES), (None, None))
if language_name is not None:
file_name = "{}--{}.txt".format(age_name, journal_name)
# If you manage to screw up this badly... Well, I am very sorry.
if write_text_file(language_name, file_name, value):
self._report.warn("No 'English' translation available, so '{}' will be used as the default",
language_name, indent=2)
else:
self._report.port("No 'English' nor any other suitable default translation available", indent=2)
with self._report.indent():
if "English" not in translations:
language_name, value = next(((language_name, value) for language_name, value in translations.items()
if language_name in _SP_LANGUAGES), (None, None))
if language_name is not None:
file_name = "{}--{}.txt".format(age_name, journal_name)
# If you manage to screw up this badly... Well, I am very sorry.
if write_text_file(language_name, file_name, value):
self._report.warn("No 'English' translation available, so '{}' will be used as the default",
language_name)
else:
self._report.port("No 'English' nor any other suitable default translation available")
def _generate_loc_files(self):
if not self._strings:
@ -243,10 +245,9 @@ class LocalizationConverter:
if modifier.enabled:
translations = [j for j in modifier.translations if j.text_id is not None]
if not translations:
self._report.error("'{}': No content translations available. The localization will not be exported.",
i.name, indent=2)
self._report.error(f"'{i.name}': No content translations available. The localization will not be exported.")
for j in translations:
self.add_string(modifier.localization_set, modifier.key_name, j.language, j.text_id, indent=1)
self.add_string(modifier.localization_set, modifier.key_name, j.language, j.text_id)
inc_progress()
def _run_generate(self):

57
korman/exporter/logger.py

@ -13,25 +13,34 @@
# You should have received a copy of the GNU General Public License
# along with Korman. If not, see <http://www.gnu.org/licenses/>.
from ..korlib import ConsoleCursor, ConsoleToggler
from .explosions import NonfatalExportError
from __future__ import annotations
from contextlib import contextmanager
from pathlib import Path
import threading
import time
from typing import *
if TYPE_CHECKING:
from io import TextIOWrapper
from ..korlib import ConsoleCursor, ConsoleToggler
from .explosions import NonfatalExportError
_HEADING_SIZE = 60
_MAX_ELIPSES = 3
_MAX_TIME_UNTIL_ELIPSES = 2.0
class _ExportLogger:
def __init__(self, print_logs, age_path=None):
self._errors = []
self._porting = []
self._warnings = []
def __init__(self, print_logs: bool, age_path: Optional[str] = None):
self._errors: List[str] = []
self._porting: List[str] = []
self._warnings: List[str] = []
self._age_path = Path(age_path) if age_path is not None else None
self._file = None
self._file: Optional[TextIOWrapper] = None
self._print_logs = print_logs
self._time_start_overall = 0
self._time_start_overall: float = 0.0
self._indent_level: int = 0
def __enter__(self):
if self._age_path is not None:
@ -48,10 +57,22 @@ class _ExportLogger:
self._file.close()
return False
@contextmanager
def indent(self):
try:
self._indent_level += 1
yield
finally:
self._indent_level -= 1
@property
def indent_level(self) -> int:
return self._indent_level
def error(self, *args, **kwargs):
assert args
indent = kwargs.get("indent", 0)
msg = "{}ERROR: {}".format(" " * indent, args[0])
indent = kwargs.get("indent", self._indent_level)
msg = f"{' ' * indent}ERROR: {args[0]}"
if len(args) > 1:
msg = msg.format(*args[1:], **kwargs)
if self._file is not None:
@ -63,8 +84,8 @@ class _ExportLogger:
def msg(self, *args, **kwargs):
assert args
indent = kwargs.get("indent", 0)
msg = "{}{}".format(" " * indent, args[0])
indent = kwargs.get("indent", self._indent_level)
msg = f"{' ' * indent}{args[0]}"
if len(args) > 1:
msg = msg.format(*args[1:], **kwargs)
if self._file is not None:
@ -74,8 +95,8 @@ class _ExportLogger:
def port(self, *args, **kwargs):
assert args
indent = kwargs.get("indent", 0)
msg = "{}PORTING: {}".format(" " * indent, args[0])
indent = kwargs.get("indent", self._indent_level)
msg = f"{' ' * indent}PORTNING: {args[0]}"
if len(args) > 1:
msg = msg.format(*args[1:], **kwargs)
if self._file is not None:
@ -98,14 +119,14 @@ class _ExportLogger:
def progress_end(self):
if self._age_path is not None:
export_time = time.perf_counter() - self._time_start_overall
self.msg("\nExported '{}' in {:.2f}s", self._age_path.name, export_time)
self.msg(f"\nExported '{self._age_path.name}' in {export_time:.2f}s")
def progress_increment(self):
pass
def progress_start(self, action):
if self._age_path is not None:
self.msg("Exporting '{}'", self._age_path.name)
self.msg(f"Exporting '{self._age_path.name}'")
self._time_start_overall = time.perf_counter()
def raise_errors(self):
@ -122,8 +143,8 @@ class _ExportLogger:
def warn(self, *args, **kwargs):
assert args
indent = kwargs.get("indent", 0)
msg = "{}WARNING: {}".format(" " * indent, args[0])
indent = kwargs.get("indent", self._indent_level)
msg = f"{' ' * indent}WARNING: {args[0]}"
if len(args) > 1:
msg = msg.format(*args[1:], **kwargs)
if self._file is not None:

497
korman/exporter/material.py

@ -193,9 +193,8 @@ class MaterialConverter:
# being a waveset, doublesided, etc.
single_user = self._requires_single_user(bo, bm)
if single_user:
mat_name = "{}_AutoSingle".format(bm.name) if bo.name == bm.name else "{}_{}".format(bo.name, bm.name)
self._report.msg("Exporting Material '{}' as single user '{}'", bm.name, mat_name, indent=1)
hgmat = None
mat_name = f"{bm.name}_AutoSingle" if bo.name == bm.name else f"{bo.name}_{bm.name}"
self._report.msg(f"Exporting Material '{bm.name}' as single user '{mat_name}'")
else:
# Ensure that RT-lit objects don't infect the static-lit objects.
lighting_mod = bo.plasma_modifiers.lighting
@ -207,7 +206,7 @@ class MaterialConverter:
mat_prefix = ""
mat_prefix2 = "NonVtxP_" if self._exporter().mesh.is_nonpreshaded(bo, bm) else ""
mat_name = "".join((mat_prefix, mat_prefix2, bm.name))
self._report.msg("Exporting Material '{}'", mat_name, indent=1)
self._report.msg(f"Exporting Material '{mat_name}'")
hsgmat = self._mgr.find_key(hsGMaterial, name=mat_name, bl=bo)
if hsgmat is not None:
return hsgmat
@ -228,43 +227,44 @@ class MaterialConverter:
restart_pass_next = False
# Loop over layers
for idx, slot in slots:
# Prepend any BumpMapping magic layers
if slot.use_map_normal:
if bo in self._bump_mats:
raise ExportError("Material '{}' has more than one bumpmap layer".format(bm.name))
du, dw, dv = self.export_bumpmap_slot(bo, bm, hsgmat, slot, idx)
hsgmat.addLayer(du.key) # Du
hsgmat.addLayer(dw.key) # Dw
hsgmat.addLayer(dv.key) # Dv
if slot.use_stencil:
stencils.append((idx, slot))
else:
tex_name = "{}_{}".format(mat_name, slot.name)
tex_layer = self.export_texture_slot(bo, bm, hsgmat, slot, idx, name=tex_name)
if restart_pass_next:
tex_layer.state.miscFlags |= hsGMatState.kMiscRestartPassHere
restart_pass_next = False
hsgmat.addLayer(tex_layer.key)
with self._report.indent():
for idx, slot in slots:
# Prepend any BumpMapping magic layers
if slot.use_map_normal:
self._bump_mats[bo] = (tex_layer.UVWSrc, tex_layer.transform)
# After a bumpmap layer(s), the next layer *must* be in a
# new pass, otherwise it gets added in non-intuitive ways
restart_pass_next = True
if stencils:
tex_state = tex_layer.state
if not tex_state.blendFlags & hsGMatState.kBlendMask:
tex_state.blendFlags |= hsGMatState.kBlendAlpha
tex_state.miscFlags |= hsGMatState.kMiscRestartPassHere | hsGMatState.kMiscBindNext
curr_stencils = len(stencils)
for i in range(curr_stencils):
stencil_idx, stencil = stencils[i]
stencil_name = "STENCILGEN_{}@{}_{}".format(stencil.name, bm.name, slot.name)
stencil_layer = self.export_texture_slot(bo, bm, hsgmat, stencil, stencil_idx, name=stencil_name)
if i+1 < curr_stencils:
stencil_layer.state.miscFlags |= hsGMatState.kMiscBindNext
hsgmat.addLayer(stencil_layer.key)
if bo in self._bump_mats:
raise ExportError("Material '{}' has more than one bumpmap layer".format(bm.name))
du, dw, dv = self.export_bumpmap_slot(bo, bm, hsgmat, slot, idx)
hsgmat.addLayer(du.key) # Du
hsgmat.addLayer(dw.key) # Dw
hsgmat.addLayer(dv.key) # Dv
if slot.use_stencil:
stencils.append((idx, slot))
else:
tex_name = "{}_{}".format(mat_name, slot.name)
tex_layer = self.export_texture_slot(bo, bm, hsgmat, slot, idx, name=tex_name)
if restart_pass_next:
tex_layer.state.miscFlags |= hsGMatState.kMiscRestartPassHere
restart_pass_next = False
hsgmat.addLayer(tex_layer.key)
if slot.use_map_normal:
self._bump_mats[bo] = (tex_layer.UVWSrc, tex_layer.transform)
# After a bumpmap layer(s), the next layer *must* be in a
# new pass, otherwise it gets added in non-intuitive ways
restart_pass_next = True
if stencils:
tex_state = tex_layer.state
if not tex_state.blendFlags & hsGMatState.kBlendMask:
tex_state.blendFlags |= hsGMatState.kBlendAlpha
tex_state.miscFlags |= hsGMatState.kMiscRestartPassHere | hsGMatState.kMiscBindNext
curr_stencils = len(stencils)
for i in range(curr_stencils):
stencil_idx, stencil = stencils[i]
stencil_name = "STENCILGEN_{}@{}_{}".format(stencil.name, bm.name, slot.name)
stencil_layer = self.export_texture_slot(bo, bm, hsgmat, stencil, stencil_idx, name=stencil_name)
if i+1 < curr_stencils:
stencil_layer.state.miscFlags |= hsGMatState.kMiscBindNext
hsgmat.addLayer(stencil_layer.key)
# Plasma makes several assumptions that every hsGMaterial has at least one layer. If this
# material had no Textures, we will need to initialize a default layer
@ -293,7 +293,7 @@ class MaterialConverter:
layer.preshade = hsColorRGBA(0.0, 0.0, 0.0, 1.0)
layer.runtime = hsColorRGBA(1.0, 1.0, 1.0, 1.0)
self.export_prepared_image(name=image_name, image=image, alpha_type=image_alpha,
owner=layer, allowed_formats={"DDS"}, indent=4)
owner=layer, allowed_formats={"DDS"})
material = self._mgr.add_object(hsGMaterial, bl=bo, name=name)
material.addLayer(layer.key)
return material, layer
@ -309,7 +309,7 @@ class MaterialConverter:
# exporting a DXT1 version. As of right now, opaque vs on_off does nothing, so we still
# get some turd-alpha data.
if image_alpha == TextureAlpha.full and not want_preshade:
self._report.warn("Using an alpha texture with a non-alpha blend mode -- this may look bad", indent=3)
self._report.warn("Using an alpha texture with a non-alpha blend mode -- this may look bad")
image_alpha = TextureAlpha.opaque
image_name = "DECALPRINT_{}".format(image.name)
else:
@ -326,15 +326,17 @@ class MaterialConverter:
if rt_key or pre_key:
return pre_key, rt_key
self._report.msg("Exporting Print Material '{}'", rtname, indent=3)
rt_material, rt_layer = make_print_material(rtname)
self._report.msg(f"Exporting Print Material '{rtname}'")
with self._report.indent():
rt_material, rt_layer = make_print_material(rtname)
if blend == hsGMatState.kBlendMult:
rt_layer.state.blendFlags |= hsGMatState.kBlendInvertFinalColor
rt_key = rt_material.key
if want_preshade:
self._report.msg("Exporting Print Material '{}'", prename, indent=3)
pre_material, pre_layer = make_print_material(prename)
self._report.msg(f"Exporting Print Material '{prename}'")
with self._report.indent():
pre_material, pre_layer = make_print_material(prename)
pre_material.compFlags |= hsGMaterial.kCompNeedsBlendChannel
pre_layer.state.miscFlags |= hsGMatState.kMiscBindNext | hsGMatState.kMiscRestartPassHere
pre_layer.preshade = hsColorRGBA(1.0, 1.0, 1.0, 1.0)
@ -346,7 +348,8 @@ class MaterialConverter:
blend_layer.state.ZFlags = hsGMatState.kZNoZWrite
blend_layer.ambient = hsColorRGBA(1.0, 1.0, 1.0, 1.0)
pre_material.addLayer(blend_layer.key)
self.export_alpha_blend("LINEAR", "HORIZONTAL", owner=blend_layer, indent=4)
with self._report.indent():
self.export_alpha_blend("LINEAR", "HORIZONTAL", owner=blend_layer)
pre_key = pre_material.key
else:
@ -354,10 +357,10 @@ class MaterialConverter:
return pre_key, rt_key
def export_waveset_material(self, bo, bm):
self._report.msg("Exporting WaveSet Material '{}'", bm.name, indent=1)
self._report.msg(f"Exporting WaveSet Material '{bm.name}'")
# WaveSets MUST have their own material
unique_name = "{}_WaveSet7".format(bm.name)
unique_name = f"{bm.name}_WaveSet7"
hsgmat = self._mgr.add_object(hsGMaterial, name=unique_name, bl=bo)
# Materials MUST have one layer. Wavesets need alpha blending...
@ -370,13 +373,13 @@ class MaterialConverter:
return hsgmat.key
def export_bumpmap_slot(self, bo, bm, hsgmat, slot, idx):
name = "{}_{}".format(hsgmat.key.name, slot.name)
self._report.msg("Exporting Plasma Bumpmap Layers for '{}'", name, indent=2)
name = f"{hsgmat.key.name}_{slot.name}"
self._report.msg(f"Exporting Plasma Bumpmap Layers for '{name}'")
# Okay, now we need to make 3 layers for the Du, Dw, and Dv
du_layer = self._mgr.find_create_object(plLayer, name="{}_DU_BumpLut".format(name), bl=bo)
dw_layer = self._mgr.find_create_object(plLayer, name="{}_DW_BumpLut".format(name), bl=bo)
dv_layer = self._mgr.find_create_object(plLayer, name="{}_DV_BumpLut".format(name), bl=bo)
du_layer = self._mgr.find_create_object(plLayer, name=f"{name}_DU_BumpLut", bl=bo)
dw_layer = self._mgr.find_create_object(plLayer, name=f"{name}_DW_BumpLut", bl=bo)
dv_layer = self._mgr.find_create_object(plLayer, name=f"{name}_DV_BumpLut", bl=bo)
for layer in (du_layer, dw_layer, dv_layer):
layer.ambient = hsColorRGBA(1.0, 1.0, 1.0, 1.0)
@ -419,118 +422,119 @@ class MaterialConverter:
def export_texture_slot(self, bo, bm, hsgmat, slot, idx, name=None, blend_flags=True):
if name is None:
name = "{}_{}".format(bm.name if bm is not None else bo.name, slot.name)
self._report.msg("Exporting Plasma Layer '{}'", name, indent=2)
name = f"{bm.name if bm is not None else bo.name}_{slot.name}"
self._report.msg(f"Exporting Plasma Layer '{name}'")
layer = self._mgr.find_create_object(plLayer, name=name, bl=bo)
if bm is not None and not slot.use_map_normal:
self._propagate_material_settings(bo, bm, slot, layer)
# UVW Channel
if slot.texture_coords == "UV":
for i, uvchan in enumerate(bo.data.uv_layers):
if uvchan.name == slot.uv_layer:
layer.UVWSrc = i
self._report.msg("Using UV Map #{} '{}'", i, name, indent=3)
break
else:
self._report.msg("No UVMap specified... Blindly using the first one, maybe it exists :|", indent=3)
with self._report.indent():
# UVW Channel
if slot.texture_coords == "UV":
for i, uvchan in enumerate(bo.data.uv_layers):
if uvchan.name == slot.uv_layer:
layer.UVWSrc = i
self._report.msg(f"Using UV Map #{i} '{name}'")
break
else:
self._report.msg("No UVMap specified... Blindly using the first one, maybe it exists :|")
# Transform
xform = hsMatrix44()
translation = hsVector3(slot.offset.x - (slot.scale.x - 1.0) / 2.0,
-slot.offset.y - (slot.scale.y - 1.0) / 2.0,
slot.offset.z - (slot.scale.z - 1.0) / 2.0)
xform.setTranslate(translation)
xform.setScale(hsVector3(*slot.scale))
layer.transform = xform
# Transform
xform = hsMatrix44()
translation = hsVector3(slot.offset.x - (slot.scale.x - 1.0) / 2.0,
-slot.offset.y - (slot.scale.y - 1.0) / 2.0,
slot.offset.z - (slot.scale.z - 1.0) / 2.0)
xform.setTranslate(translation)
xform.setScale(hsVector3(*slot.scale))
layer.transform = xform
wantStencil, canStencil = slot.use_stencil, slot.use_stencil and bm is not None and not slot.use_map_normal
if wantStencil and not canStencil:
self._exporter().report.warn("{} wants to stencil, but this is not a real Material".format(slot.name))
wantStencil, canStencil = slot.use_stencil, slot.use_stencil and bm is not None and not slot.use_map_normal
if wantStencil and not canStencil:
self._exporter().report.warn(f"{slot.name} wants to stencil, but this is not a real Material")
state = layer.state
if canStencil:
hsgmat.compFlags |= hsGMaterial.kCompNeedsBlendChannel
state.blendFlags |= hsGMatState.kBlendAlpha | hsGMatState.kBlendAlphaMult | hsGMatState.kBlendNoTexColor
state.ZFlags |= hsGMatState.kZNoZWrite
layer.ambient = hsColorRGBA(1.0, 1.0, 1.0, 1.0)
elif blend_flags:
# Standard layer flags ahoy
if slot.blend_type == "ADD":
state.blendFlags |= hsGMatState.kBlendAddColorTimesAlpha
elif slot.blend_type == "MULTIPLY":
state.blendFlags |= hsGMatState.kBlendMult
# Check if this layer uses diffuse/runtime lighting
if bm is not None and not slot.use_map_color_diffuse:
layer.preshade = hsColorRGBA(0.0, 0.0, 0.0, 1.0)
layer.runtime = hsColorRGBA(0.0, 0.0, 0.0, 1.0)
# Check if this layer uses specular lighting
if bm is not None and slot.use_map_color_spec:
state.shadeFlags |= hsGMatState.kShadeSpecular
else:
layer.specular = hsColorRGBA(0.0, 0.0, 0.0, 1.0)
layer.specularPower = 1.0
texture = slot.texture
if texture.type == "BLEND":
hsgmat.compFlags |= hsGMaterial.kCompNeedsBlendChannel
# Handle material and per-texture emissive
if self._is_emissive(bm):
# If the previous slot's use_map_emit is different, then we need to flag this as a new
# pass so that the new emit color will be used. But only if it's not a doggone stencil.
if not wantStencil and bm is not None and slot is not None:
filtered_slots = tuple(filter(lambda x: x and x.use, bm.texture_slots[:idx]))
if filtered_slots:
prev_slot = filtered_slots[-1]
if prev_slot != slot and prev_slot.use_map_emit != slot.use_map_emit:
state.miscFlags |= hsGMatState.kMiscRestartPassHere
if self._is_emissive(bm, slot):
# Lightmapped emissive layers seem to cause cascading render issues. Skip flagging it
# and just hope that the ambient color bump is good enough.
if bo.plasma_modifiers.lightmap.bake_lightmap:
self._report.warn("A lightmapped and emissive material??? You like living dangerously...", indent=3)
else:
state.shadeFlags |= hsGMatState.kShadeEmissive
# Apply custom layer properties
wantBumpmap = bm is not None and slot.use_map_normal
if wantBumpmap:
state.blendFlags = hsGMatState.kBlendDot3
state.miscFlags = hsGMatState.kMiscBumpLayer
strength = max(min(1.0, slot.normal_factor), 0.0)
layer.ambient = hsColorRGBA(0.0, 0.0, 0.0, 1.0)
layer.preshade = hsColorRGBA(0.0, 0.0, 0.0, 1.0)
layer.runtime = hsColorRGBA(strength, 0.0, 0.0, 1.0)
layer.specular = hsColorRGBA(0.0, 0.0, 0.0, 1.0)
else:
layer_props = texture.plasma_layer
layer.opacity = layer_props.opacity / 100
self._handle_layer_opacity(layer, layer_props.opacity)
if layer_props.alpha_halo:
state.blendFlags |= hsGMatState.kBlendAlphaTestHigh
if layer_props.z_bias:
state.ZFlags |= hsGMatState.kZIncLayer
if layer_props.skip_depth_test:
state.ZFlags |= hsGMatState.kZNoZRead
if layer_props.skip_depth_write:
state = layer.state
if canStencil:
hsgmat.compFlags |= hsGMaterial.kCompNeedsBlendChannel
state.blendFlags |= hsGMatState.kBlendAlpha | hsGMatState.kBlendAlphaMult | hsGMatState.kBlendNoTexColor
state.ZFlags |= hsGMatState.kZNoZWrite
layer.ambient = hsColorRGBA(1.0, 1.0, 1.0, 1.0)
elif blend_flags:
# Standard layer flags ahoy
if slot.blend_type == "ADD":
state.blendFlags |= hsGMatState.kBlendAddColorTimesAlpha
elif slot.blend_type == "MULTIPLY":
state.blendFlags |= hsGMatState.kBlendMult
# Export the specific texture type
self._tex_exporters[texture.type](bo, layer, slot, idx)
# Export any layer animations
# NOTE: animated stencils and bumpmaps are nonsense.
if not slot.use_stencil and not wantBumpmap:
layer = self._export_layer_animations(bo, bm, slot, idx, layer)
# Check if this layer uses diffuse/runtime lighting
if bm is not None and not slot.use_map_color_diffuse:
layer.preshade = hsColorRGBA(0.0, 0.0, 0.0, 1.0)
layer.runtime = hsColorRGBA(0.0, 0.0, 0.0, 1.0)
# Stash the top of the stack for later in the export
if bm is not None:
self._obj2layer[bo][bm][texture].append(layer.key)
return layer
# Check if this layer uses specular lighting
if bm is not None and slot.use_map_color_spec:
state.shadeFlags |= hsGMatState.kShadeSpecular
else:
layer.specular = hsColorRGBA(0.0, 0.0, 0.0, 1.0)
layer.specularPower = 1.0
texture = slot.texture
if texture.type == "BLEND":
hsgmat.compFlags |= hsGMaterial.kCompNeedsBlendChannel
# Handle material and per-texture emissive
if self._is_emissive(bm):
# If the previous slot's use_map_emit is different, then we need to flag this as a new
# pass so that the new emit color will be used. But only if it's not a doggone stencil.
if not wantStencil and bm is not None and slot is not None:
filtered_slots = tuple(filter(lambda x: x and x.use, bm.texture_slots[:idx]))
if filtered_slots:
prev_slot = filtered_slots[-1]
if prev_slot != slot and prev_slot.use_map_emit != slot.use_map_emit:
state.miscFlags |= hsGMatState.kMiscRestartPassHere
if self._is_emissive(bm, slot):
# Lightmapped emissive layers seem to cause cascading render issues. Skip flagging it
# and just hope that the ambient color bump is good enough.
if bo.plasma_modifiers.lightmap.bake_lightmap:
self._report.warn("A lightmapped and emissive material??? You like living dangerously...")
else:
state.shadeFlags |= hsGMatState.kShadeEmissive
# Apply custom layer properties
wantBumpmap = bm is not None and slot.use_map_normal
if wantBumpmap:
state.blendFlags = hsGMatState.kBlendDot3
state.miscFlags = hsGMatState.kMiscBumpLayer
strength = max(min(1.0, slot.normal_factor), 0.0)
layer.ambient = hsColorRGBA(0.0, 0.0, 0.0, 1.0)
layer.preshade = hsColorRGBA(0.0, 0.0, 0.0, 1.0)
layer.runtime = hsColorRGBA(strength, 0.0, 0.0, 1.0)
layer.specular = hsColorRGBA(0.0, 0.0, 0.0, 1.0)
else:
layer_props = texture.plasma_layer
layer.opacity = layer_props.opacity / 100
self._handle_layer_opacity(layer, layer_props.opacity)
if layer_props.alpha_halo:
state.blendFlags |= hsGMatState.kBlendAlphaTestHigh
if layer_props.z_bias:
state.ZFlags |= hsGMatState.kZIncLayer
if layer_props.skip_depth_test:
state.ZFlags |= hsGMatState.kZNoZRead
if layer_props.skip_depth_write:
state.ZFlags |= hsGMatState.kZNoZWrite
# Export the specific texture type
self._tex_exporters[texture.type](bo, layer, slot, idx)
# Export any layer animations
# NOTE: animated stencils and bumpmaps are nonsense.
if not slot.use_stencil and not wantBumpmap:
layer = self._export_layer_animations(bo, bm, slot, idx, layer)
# Stash the top of the stack for later in the export
if bm is not None:
self._obj2layer[bo][bm][texture].append(layer.key)
return layer
def _export_layer_animations(self, bo, bm, tex_slot, idx, base_layer) -> plLayer:
top_layer = base_layer
@ -715,7 +719,7 @@ class MaterialConverter:
# to a big "finalize" save step to prevent races. The texture cache would
# prevent that as well, so we could theoretically slice-and-dice the single
# image here... but... meh. Offloading taim.
self.export_prepared_image(texture=texture, owner=layer, indent=3,
self.export_prepared_image(texture=texture, owner=layer,
alpha_type=TextureAlpha.opaque, mipmap=True,
allowed_formats={"DDS"}, is_cube_map=True, tag="cubemap")
@ -732,7 +736,7 @@ class MaterialConverter:
oRes = bl_env.resolution
eRes = helpers.ensure_power_of_two(oRes)
if oRes != eRes:
self._report.msg("Overriding EnvMap size to ({}x{}) -- POT", eRes, eRes, indent=3)
self._report.msg(f"Overriding EnvMap size to ({eRes}x{eRes}) -- POT")
# And now for the general ho'hum-ness
pl_env = self._mgr.find_create_object(pl_class, bl=bo, name=name)
@ -791,7 +795,7 @@ class MaterialConverter:
if viewpt.type == "CAMERA":
warn = self._report.port if bl_env.mapping == "PLANE" else self._report.warn
warn("Environment Map '{}' is exporting as a cube map. The viewpoint '{}' is a camera, but only its position will be used.",
bl_env.id_data.name, viewpt.name, indent=5)
bl_env.id_data.name, viewpt.name)
# DEMs can do just a position vector. We actually prefer this because the WaveSet exporter
# will probably want to steal it for diabolical purposes... In MOUL, root objects are
@ -830,8 +834,7 @@ class MaterialConverter:
alpha_type = self._test_image_alpha(texture.image)
has_alpha = texture.use_calculate_alpha or slot.use_stencil or alpha_type != TextureAlpha.opaque
if (texture.image.use_alpha and texture.use_alpha) and not has_alpha:
warning = "'{}' wants to use alpha, but '{}' is opaque".format(texture.name, texture.image.name)
self._exporter().report.warn(warning, indent=3)
self._report.warn(f"'{texture.name}' wants to use alpha, but '{texture.image.name}' is opaque")
else:
alpha_type, has_alpha = TextureAlpha.opaque, False
@ -894,8 +897,7 @@ class MaterialConverter:
detail_fade_stop=layer_props.detail_fade_stop,
detail_opacity_start=layer_props.detail_opacity_start,
detail_opacity_stop=layer_props.detail_opacity_stop,
mipmap=mipmap, allowed_formats=allowed_formats,
indent=3)
mipmap=mipmap, allowed_formats=allowed_formats)
def _export_texture_type_none(self, bo, layer, slot, idx):
# We'll allow this, just for sanity's sake...
@ -911,14 +913,12 @@ class MaterialConverter:
texture = slot.texture
self.export_alpha_blend(texture.progression, texture.use_flip_axis, layer)
def export_alpha_blend(self, progression, axis, owner, indent=2):
def export_alpha_blend(self, progression, axis, owner):
"""This exports an alpha blend texture as exposed by bpy.types.BlendTexture.
The following arguments are expected:
- progression: (required)
- axis: (required)
- owner: (required) the Plasma object using this image
- indent: (optional) indentation level for log messages
default: 2
"""
# Certain blend types don't use an axis...
@ -1013,7 +1013,7 @@ class MaterialConverter:
image.pack(True)
self.export_prepared_image(image=image, owner=owner, allowed_formats={"BMP"},
alpha_type=TextureAlpha.full, indent=indent, ephemeral=True)
alpha_type=TextureAlpha.full, ephemeral=True)
def export_prepared_image(self, **kwargs):
"""This exports an externally prepared image and an optional owning layer.
@ -1026,8 +1026,6 @@ class MaterialConverter:
valid options: BMP, DDS, JPG, PNG
- extension: (optional) file extension to use for the image object
to use the image datablock extension, set this to None
- indent: (optional) indentation level for log messages
default: 2
- ephemeral: (optional) never cache this image
- tag: (optional) an optional identifier hint that allows multiple images with the
same name to coexist in the cache
@ -1035,15 +1033,14 @@ class MaterialConverter:
that must be split into six separate images for Plasma
"""
owner = kwargs.pop("owner", None)
indent = kwargs.pop("indent", 2)
key = _Texture(**kwargs)
image = key.image
if key not in self._pending:
self._report.msg("Stashing '{}' for conversion as '{}'", image.name, key, indent=indent)
self._report.msg("Stashing '{}' for conversion as '{}'", image.name, key)
self._pending[key] = [owner.key,]
else:
self._report.msg("Found another user of '{}'", key, indent=indent)
self._report.msg("Found another user of '{}'", key)
self._pending[key].append(owner.key)
def finalize(self):
@ -1064,43 +1061,44 @@ class MaterialConverter:
pClassName = "CubicEnvironmap" if key.is_cube_map else "Mipmap"
self._report.msg("\n[{} '{}']", pClassName, name)
image = key.image
# Now we try to use the pile of hints we were given to figure out what format to use
allowed_formats = key.allowed_formats
if key.mipmap:
compression = plBitmap.kDirectXCompression
elif "PNG" in allowed_formats and self._mgr.getVer() == pvMoul:
compression = plBitmap.kPNGCompression
elif "DDS" in allowed_formats:
compression = plBitmap.kDirectXCompression
elif "JPG" in allowed_formats:
compression = plBitmap.kJPEGCompression
elif "BMP" in allowed_formats:
compression = plBitmap.kUncompressed
else:
raise RuntimeError(allowed_formats)
dxt = plBitmap.kDXT5 if key.alpha_type == TextureAlpha.full else plBitmap.kDXT1
# Mayhaps we have a cached version of this that has already been exported
cached_image = texcache.get_from_texture(key, compression)
with self._report.indent():
image = key.image
# Now we try to use the pile of hints we were given to figure out what format to use
allowed_formats = key.allowed_formats
if key.mipmap:
compression = plBitmap.kDirectXCompression
elif "PNG" in allowed_formats and self._mgr.getVer() == pvMoul:
compression = plBitmap.kPNGCompression
elif "DDS" in allowed_formats:
compression = plBitmap.kDirectXCompression
elif "JPG" in allowed_formats:
compression = plBitmap.kJPEGCompression
elif "BMP" in allowed_formats:
compression = plBitmap.kUncompressed
else:
raise RuntimeError(allowed_formats)
dxt = plBitmap.kDXT5 if key.alpha_type == TextureAlpha.full else plBitmap.kDXT1
if cached_image is None:
numLevels, width, height, data = self._finalize_cache(texcache, key, image, name, compression, dxt)
self._finalize_bitmap(key, owners, name, numLevels, width, height, compression, dxt, data)
else:
width, height = cached_image.export_size
data = cached_image.image_data
numLevels = cached_image.mip_levels
# Mayhaps we have a cached version of this that has already been exported
cached_image = texcache.get_from_texture(key, compression)
# If the cached image data is junk, PyHSPlasma will raise a RuntimeError,
# so we'll attempt a recache...
try:
self._finalize_bitmap(key, owners, name, numLevels, width, height, compression, dxt, data)
except RuntimeError:
self._report.warn("Cached image is corrupted! Recaching image...", indent=1)
if cached_image is None:
numLevels, width, height, data = self._finalize_cache(texcache, key, image, name, compression, dxt)
self._finalize_bitmap(key, owners, name, numLevels, width, height, compression, dxt, data)
else:
width, height = cached_image.export_size
data = cached_image.image_data
numLevels = cached_image.mip_levels
# If the cached image data is junk, PyHSPlasma will raise a RuntimeError,
# so we'll attempt a recache...
try:
self._finalize_bitmap(key, owners, name, numLevels, width, height, compression, dxt, data)
except RuntimeError:
self._report.warn("Cached image is corrupted! Recaching image...")
numLevels, width, height, data = self._finalize_cache(texcache, key, image, name, compression, dxt)
self._finalize_bitmap(key, owners, name, numLevels, width, height, compression, dxt, data)
inc_progress()
@ -1111,45 +1109,46 @@ class MaterialConverter:
# business to account for per-page textures
pages = {}
self._report.msg("Adding to...", indent=1)
for owner_key in owners:
owner = owner_key.object
self._report.msg("[{} '{}']", owner.ClassName()[2:], owner_key.name, indent=2)
page = mgr.get_textures_page(owner_key) # Layer's page or Textures.prp
# If we haven't created this texture in the page (either layer's page or Textures.prp),
# then we need to do that and stuff the level data. This is a little tedious, but we
# need to be careful to manage our resources correctly
if page not in pages:
mipmap = plMipmap(name=name, width=width, height=height, numLevels=numLevels,
compType=compression, format=plBitmap.kRGB8888, dxtLevel=dxt)
if key.is_cube_map:
assert len(data) == 6
texture = plCubicEnvironmap(name)
for face_name, face_data in zip(BLENDER_CUBE_MAP, data):
self._report.msg("Adding to...")
with self._report.indent():
for owner_key in owners:
owner = owner_key.object
self._report.msg(f"[{owner.ClassName()[2:]} '{owner_key.name}']")
page = mgr.get_textures_page(owner_key) # Layer's page or Textures.prp
# If we haven't created this texture in the page (either layer's page or Textures.prp),
# then we need to do that and stuff the level data. This is a little tedious, but we
# need to be careful to manage our resources correctly
if page not in pages:
mipmap = plMipmap(name=name, width=width, height=height, numLevels=numLevels,
compType=compression, format=plBitmap.kRGB8888, dxtLevel=dxt)
if key.is_cube_map:
assert len(data) == 6
texture = plCubicEnvironmap(name)
for face_name, face_data in zip(BLENDER_CUBE_MAP, data):
for i in range(numLevels):
mipmap.setLevel(i, face_data[i])
setattr(texture, face_name, mipmap)
else:
assert len(data) == 1
for i in range(numLevels):
mipmap.setLevel(i, face_data[i])
setattr(texture, face_name, mipmap)
else:
assert len(data) == 1
for i in range(numLevels):
mipmap.setLevel(i, data[0][i])
texture = mipmap
mipmap.setLevel(i, data[0][i])
texture = mipmap
mgr.AddObject(page, texture)
pages[page] = texture
else:
texture = pages[page]
# The object that references this image can be either a layer (will appear
# in the 3d world) or an image library (will appear in a journal or in another
# dynamic manner in game)
if isinstance(owner, plLayerInterface):
owner.texture = texture.key
elif isinstance(owner, plImageLibMod):
owner.addImage(texture.key)
else:
raise NotImplementedError(owner.ClassName())
mgr.AddObject(page, texture)
pages[page] = texture
else:
texture = pages[page]
# The object that references this image can be either a layer (will appear
# in the 3d world) or an image library (will appear in a journal or in another
# dynamic manner in game)
if isinstance(owner, plLayerInterface):
owner.texture = texture.key
elif isinstance(owner, plImageLibMod):
owner.addImage(texture.key)
else:
raise NotImplementedError(owner.ClassName())
def _finalize_cache(self, texcache, key, image, name, compression, dxt):
if key.is_cube_map:
@ -1162,7 +1161,7 @@ class MaterialConverter:
def _finalize_cube_map(self, key, image, name, compression, dxt):
oWidth, oHeight = image.size
if oWidth == 0 and oHeight == 0:
raise ExportError("Image '{}' could not be loaded.".format(image.name))
raise ExportError(f"Image '{image.name}' could not be loaded.")
# Non-DXT images are BGRA in Plasma
bgra = compression != plBitmap.kDirectXCompression
@ -1177,7 +1176,7 @@ class MaterialConverter:
# something funky.
if oWidth != cWidth or oHeight != cHeight:
self._report.warn("Image was resized by Blender to ({}x{})--resizing the resize to ({}x{})",
cWidth, cHeight, oWidth, oHeight, indent=1)
cWidth, cHeight, oWidth, oHeight)
data = scale_image(data, cWidth, cHeight, oWidth, oHeight)
# Face dimensions
@ -1213,14 +1212,14 @@ class MaterialConverter:
name = face_name[:-4].upper()
if compression == plBitmap.kDirectXCompression:
numLevels = glimage.num_levels
self._report.msg("Generating mip levels for cube face '{}'", name, indent=1)
self._report.msg("Generating mip levels for cube face '{}'", name)
# If we're compressing this mofo, we'll need a temporary mipmap to do that here...
mipmap = plMipmap(name=name, width=eWidth, height=eHeight, numLevels=numLevels,
compType=compression, format=plBitmap.kRGB8888, dxtLevel=dxt)
else:
numLevels = 1
self._report.msg("Compressing single level for cube face '{}'", name, indent=1)
self._report.msg("Compressing single level for cube face '{}'", name)
face_images[i] = [None] * numLevels
for j in range(numLevels):
@ -1244,7 +1243,7 @@ class MaterialConverter:
eWidth, eHeight = glimage.size_pot
if compression == plBitmap.kDirectXCompression:
numLevels = glimage.num_levels
self._report.msg("Generating mip levels", indent=1)
self._report.msg("Generating mip levels")
# If this is a DXT-compressed mipmap, we need to use a temporary mipmap
# to do the compression. We'll then steal the data from it.
@ -1252,7 +1251,7 @@ class MaterialConverter:
compType=compression, format=plBitmap.kRGB8888, dxtLevel=dxt)
else:
numLevels = 1
self._report.msg("Compressing single level", indent=1)
self._report.msg("Compressing single level")
# Hold the uncompressed level data for now. We may have to make multiple copies of
# this mipmap for per-page textures :(
@ -1281,7 +1280,7 @@ class MaterialConverter:
yield from filter(None, self._obj2layer[bo][bm][tex])
return
if bo is None and bm is None and tex is None:
self._exporter().report.warn("Asking for all the layers we've ever exported, eh? You like living dangerously.", indent=2)
self._exporter().report.warn("Asking for all the layers we've ever exported, eh? You like living dangerously.")
# What we want to do is filter _obj2layers:
# bo if set, or all objects
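Note: the reworked call sites in this file (and in the files below) all assume a report object whose msg/warn/port/error helpers honor a shared indentation level and whose indent() method doubles as a context manager. The korman/exporter/logger.py changes are counted in the file list above but not reproduced in this view, so the following is only a minimal sketch of that pattern, assuming an indent_level counter; the class name and the print-based output are illustrative, not the real implementation.

from contextlib import contextmanager

class _IndentingReport:
    """Minimal sketch of an indentation-aware export logger (illustrative only)."""

    def __init__(self):
        self.indent_level = 0

    def msg(self, fmt, *args, indent=None):
        # An explicit indent= keyword keeps legacy call sites working;
        # otherwise the current nesting level from the context manager is used.
        level = self.indent_level if indent is None else indent
        print("    " * level + (fmt.format(*args) if args else str(fmt)))

    # The real report exposes separate warn/port/error helpers; for this
    # sketch they behave the same as msg().
    warn = port = error = msg

    @contextmanager
    def indent(self, amount=1):
        # Raise the indentation for the duration of a `with` block and
        # restore it afterwards, even if the block raises.
        self.indent_level += amount
        try:
            yield self
        finally:
            self.indent_level -= amount

With something like this in place, `with report.indent():` blocks nest naturally, which is why most of the explicit indent= arguments in the hunks below simply disappear.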

31
korman/exporter/mesh.py

@ -357,22 +357,24 @@ class MeshConverter(_MeshManager):
self._report.progress_range = len(self._dspans)
inc_progress = self._report.progress_increment
log_msg = self._report.msg
indent = self._report.indent
log_msg("\nFinalizing Geometry")
for loc in self._dspans.values():
for dspan in loc.values():
log_msg("[DrawableSpans '{}']", dspan.key.name, indent=1)
# This mega-function does a lot:
# 1. Converts SourceSpans (geospans) to Icicles and bakes geometry into plGBuffers
# 2. Calculates the Icicle bounds
# 3. Builds the plSpaceTree
# 4. Clears the SourceSpans
dspan.composeGeometry(True, True)
inc_progress()
with indent():
for loc in self._dspans.values():
for dspan in loc.values():
log_msg("[DrawableSpans '{}']", dspan.key.name)
# This mega-function does a lot:
# 1. Converts SourceSpans (geospans) to Icicles and bakes geometry into plGBuffers
# 2. Calculates the Icicle bounds
# 3. Builds the plSpaceTree
# 4. Clears the SourceSpans
dspan.composeGeometry(True, True)
inc_progress()
def _export_geometry(self, bo, mesh, materials, geospans, mat2span_LUT):
self._report.msg("Converting geometry from '{}'...", mesh.name, indent=1)
self._report.msg(f"Converting geometry from '{mesh.name}'...")
# Recall that materials is a mapping of exported materials to blender material indices.
# Therefore, geodata maps blender material indices to working geometry data.
@ -633,7 +635,7 @@ class MeshConverter(_MeshManager):
for i in geospans:
dspan = self._find_create_dspan(bo, i.geospan, i.pass_index)
self._report.msg("Exported hsGMaterial '{}' geometry into '{}'",
i.geospan.material.name, dspan.key.name, indent=1)
i.geospan.material.name, dspan.key.name)
idx = dspan.addSourceSpan(i.geospan)
diidx = _diindices.setdefault(dspan, [])
diidx.append(idx)
@ -652,8 +654,7 @@ class MeshConverter(_MeshManager):
waveset_mod = bo.plasma_modifiers.water_basic
if waveset_mod.enabled:
if len(materials) > 1:
msg = "'{}' is a WaveSet -- only one material is supported".format(bo.name)
self._exporter().report.warn(msg, indent=1)
self._report.warn(f"'{bo.name}' is a WaveSet -- only one material is supported")
blmat = materials[0][1]
self._check_vtx_nonpreshaded(bo, mesh, 0, blmat)
matKey = self.material.export_waveset_material(bo, blmat)

8
korman/exporter/outfile.py

@ -318,11 +318,11 @@ class OutputFiles:
py_code = "{}\n\n{}\n".format(i.file_data, plasma_python_glue)
else:
py_code = i.file_data
result, pyc = korlib.compyle(i.filename, py_code, py_version, report, indent=1)
result, pyc = korlib.compyle(i.filename, py_code, py_version, report)
if result:
pyc_objects.append((i.filename, pyc))
except korlib.PythonNotAvailableError as error:
report.warn("Python {} is not available. Your Age scripts were not packaged.", error, indent=1)
report.warn(f"Python {error} is not available. Your Age scripts were not packaged.")
else:
if pyc_objects:
with self.generate_dat_file("{}.pak".format(self._exporter().age_name),
@ -392,7 +392,7 @@ class OutputFiles:
shutil.copy2(i.file_path, dst_path)
else:
report.warn("No data found for dependency file '{}'. It will not be copied into the export directory.",
PurePath(i.dirname, i.filename), indent=1)
PurePath(i.dirname, i.filename))
def _write_gather_build(self):
report = self._exporter().report
@ -458,7 +458,7 @@ class OutputFiles:
elif i.file_path:
zf.write(i.file_path, arcpath)
else:
report.warn("No data found for dependency file '{}'. It will not be archived.", arcpath, indent=1)
report.warn(f"No data found for dependency file '{arcpath}'. It will not be archived.")
@property
def _version(self):

6
korman/exporter/physics.py

@ -261,7 +261,7 @@ class PhysicsConverter:
member_group = getattr(plSimDefs, kwargs.get("member_group", "kGroupLOSOnly"))
if physical.memberGroup != member_group and member_group != plSimDefs.kGroupLOSOnly:
self._report.warn("{}: Physical memberGroup overwritten!", bo.name, indent=2)
self._report.warn(f"{bo.name}: Physical memberGroup overwritten!")
physical.memberGroup = member_group
# Sanity checking: only TPotS/Havok fully supports triangle mesh detector regions.
@ -271,7 +271,7 @@ class PhysicsConverter:
if physical.memberGroup == plSimDefs.kGroupDetector and physical.boundsType in (plSimDefs.kExplicitBounds, plSimDefs.kProxyBounds):
msg = f"'{bo.name}': Triangle mesh regions are poorly supported. Use a convex hull or box instead."
if ver <= pvPots:
self._report.port(msg, indent=2)
self._report.port(msg)
else:
raise ExportError(msg)
@ -297,7 +297,7 @@ class PhysicsConverter:
if volume < 0.001:
self._report.warn(
"{}: Physical wants to be a convex hull but appears to be flat (volume={}), forcing to triangle mesh...",
bo.name, volume, indent=2
bo.name, volume
)
self._export_trimesh(bo, physical, local_space, mat)

8
korman/exporter/python.py

@ -52,7 +52,7 @@ class PythonPackageExporter:
code = source
code = "{}\n\n{}\n".format(code, plasma_python_glue)
success, result = korlib.compyle(filename, code, py_version, report, indent=1)
success, result = korlib.compyle(filename, code, py_version, report)
if not success:
raise ExportError("Failed to compyle '{}':\n{}".format(filename, result))
py_code.append((filename, result))
@ -68,7 +68,7 @@ class PythonPackageExporter:
code = source
# no glue needed here, ma!
success, result = korlib.compyle(filename, code, py_version, report, indent=1)
success, result = korlib.compyle(filename, code, py_version, report)
if not success:
raise ExportError("Failed to compyle '{}':\n{}".format(filename, result))
py_code.append((filename, result))
@ -88,10 +88,10 @@ class PythonPackageExporter:
if age_py.plasma_text.package or age.python_method == "all":
self._pfms[py_filename] = age_py
else:
report.warn("AgeSDL Python Script provided, but not requested for packing... Using default Python.", indent=1)
report.warn("AgeSDL Python Script provided, but not requested for packing... Using default Python.")
self._pfms[py_filename] = very_very_special_python.format(age_name=fixed_agename)
else:
report.msg("Packing default AgeSDL Python", indent=1)
report.msg("Packing default AgeSDL Python")
very_very_special_python.format(age_name=age_props.age_name)
self._pfms[py_filename] = very_very_special_python.format(age_name=fixed_agename)

106
korman/exporter/rtlight.py

@ -50,19 +50,19 @@ class LightConverter:
# If you change these calculations, be sure to update the AnimationConverter!
intens, attenEnd = self.convert_attenuation(bl)
if bl.falloff_type == "CONSTANT":
self._report.msg("Attenuation: No Falloff", indent=2)
self._report.msg("Attenuation: No Falloff")
pl.attenConst = intens
pl.attenLinear = 0.0
pl.attenQuadratic = 0.0
pl.attenCutoff = attenEnd
elif bl.falloff_type == "INVERSE_LINEAR":
self._report.msg("Attenuation: Inverse Linear", indent=2)
self._report.msg("Attenuation: Inverse Linear")
pl.attenConst = 1.0
pl.attenLinear = self.convert_attenuation_linear(intens, attenEnd)
pl.attenQuadratic = 0.0
pl.attenCutoff = attenEnd
elif bl.falloff_type == "INVERSE_SQUARE":
self._report.msg("Attenuation: Inverse Square", indent=2)
self._report.msg("Attenuation: Inverse Square")
pl.attenConst = 1.0
pl.attenLinear = 0.0
pl.attenQuadratic = self.convert_attenuation_quadratic(intens, attenEnd)
@ -82,19 +82,21 @@ class LightConverter:
return max(0.0, (intensity * _FAR_POWER - 1.0) / pow(end, 2))
def _convert_area_lamp(self, bl, pl):
self._report.msg("[LimitedDirLightInfo '{}']", bl.name, indent=1)
self._report.msg("[LimitedDirLightInfo '{}']", bl.name)
pl.width = bl.size
pl.depth = bl.size if bl.shape == "SQUARE" else bl.size_y
pl.height = bl.plasma_lamp.size_height
def _convert_point_lamp(self, bl, pl):
self._report.msg("[OmniLightInfo '{}']", bl.name, indent=1)
self._convert_attenuation(bl, pl)
self._report.msg("[OmniLightInfo '{}']", bl.name)
with self._report.indent():
self._convert_attenuation(bl, pl)
def _convert_spot_lamp(self, bl, pl):
self._report.msg("[SpotLightInfo '{}']", bl.name, indent=1)
self._convert_attenuation(bl, pl)
self._report.msg("[SpotLightInfo '{}']", bl.name)
with self._report.indent():
self._convert_attenuation(bl, pl)
# Spot lights have a few more things...
spot_size = bl.spot_size
@ -109,7 +111,7 @@ class LightConverter:
pl.falloff = 1.0
def _convert_sun_lamp(self, bl, pl):
self._report.msg("[DirectionalLightInfo '{}']", bl.name, indent=1)
self._report.msg("[DirectionalLightInfo '{}']", bl.name)
def export_rtlight(self, so, bo):
bl_light = bo.data
@ -139,18 +141,18 @@ class LightConverter:
# Apply the colors
if bl_light.use_diffuse:
self._report.msg("Diffuse: {}", diff_str, indent=2)
self._report.msg(f"Diffuse: {diff_str}")
pl_light.diffuse = hsColorRGBA(*diff_color)
else:
self._report.msg("Diffuse: OFF", indent=2)
self._report.msg("Diffuse: OFF")
pl_light.diffuse = hsColorRGBA(0.0, 0.0, 0.0, energy)
if bl_light.use_specular:
self._report.msg("Specular: {}", spec_str, indent=2)
self._report.msg(f"Specular: {spec_str}")
pl_light.setProperty(plLightInfo.kLPHasSpecular, True)
pl_light.specular = hsColorRGBA(*spec_color)
else:
self._report.msg("Specular: OFF", indent=2)
self._report.msg(f"Specular: OFF")
pl_light.specular = hsColorRGBA(0.0, 0.0, 0.0, energy)
rtlamp = bl_light.plasma_lamp
@ -207,7 +209,7 @@ class LightConverter:
# projection Lamp with our own faux Material. Unfortunately, Plasma only supports projecting
# one layer. We could exploit the fUnderLay and fOverLay system to export everything, but meh.
if len(tex_slots) > 1:
self._report.warn("Only one texture slot can be exported per Lamp. Picking the first one: '{}'".format(slot.name), indent=3)
self._report.warn(f"Only one texture slot can be exported per Lamp. Picking the first one: '{slot.name}'")
layer = mat.export_texture_slot(bo, None, None, slot, 0, blend_flags=False)
state = layer.state
@ -250,50 +252,50 @@ class LightConverter:
def find_material_light_keys(self, bo, bm):
"""Given a blender material, we find the keys of all matching Plasma RT Lights.
NOTE: We return a tuple of lists: ([permaLights], [permaProjs])"""
self._report.msg("Searching for runtime lights...", indent=1)
self._report.msg("Searching for runtime lights...")
permaLights = []
permaProjs = []
# We're going to inspect the material's light group.
# If there is no light group, we'll say that there is no runtime lighting...
# If there is, we will harvest all Blender lamps in that light group that are Plasma Objects
lg = bm.light_group
if lg is not None:
    for obj in lg.objects:
        if obj.type != "LAMP":
            # moronic...
            continue
        elif not obj.plasma_object.enabled:
            # who cares?
            continue
        lamp = obj.data

        # Check to see if they only want this light to work on its layer...
        if lamp.use_own_layer:
            # Pairs up elements from both layers sequences such that we can compare
            # to see if the lamp and object are in the same layer.
            # If you can think of a better way, be my guest.
            test = zip(bo.layers, obj.layers)
            for i in test:
                if i == (True, True):
                    break
            else:
                # didn't find a layer where both lamp and object were, skip it.
                self._report.msg("[{}] '{}': not in same layer, skipping...",
                                 lamp.type, obj.name, indent=2)
                continue

        # This is probably where PermaLight vs PermaProj should be sorted out...
        pl_light = self.get_light_key(obj, lamp, None)
        if self._is_projection_lamp(lamp):
            self._report.msg("[{}] PermaProj '{}'", lamp.type, obj.name, indent=2)
            permaProjs.append(pl_light)
        else:
            self._report.msg("[{}] PermaLight '{}'", lamp.type, obj.name, indent=2)
            permaLights.append(pl_light)
with self._report.indent():
    # We're going to inspect the material's light group.
    # If there is no light group, we'll say that there is no runtime lighting...
    # If there is, we will harvest all Blender lamps in that light group that are Plasma Objects
    lg = bm.light_group
    if lg is not None:
        for obj in lg.objects:
            if obj.type != "LAMP":
                # moronic...
                continue
            elif not obj.plasma_object.enabled:
                # who cares?
                continue
            lamp = obj.data

            # Check to see if they only want this light to work on its layer...
            if lamp.use_own_layer:
                # Pairs up elements from both layers sequences such that we can compare
                # to see if the lamp and object are in the same layer.
                # If you can think of a better way, be my guest.
                test = zip(bo.layers, obj.layers)
                for i in test:
                    if i == (True, True):
                        break
                else:
                    # didn't find a layer where both lamp and object were, skip it.
                    self._report.msg(f"[{lamp.type}] '{obj.name}': not in same layer, skipping...")
                    continue

            # This is probably where PermaLight vs PermaProj should be sorted out...
            pl_light = self.get_light_key(obj, lamp, None)
            if self._is_projection_lamp(lamp):
                self._report.msg(f"[{lamp.type}] PermaProj '{obj.name}'")
                permaProjs.append(pl_light)
            else:
                self._report.msg(f"[{lamp.type}] PermaLight '{obj.name}'")
                permaLights.append(pl_light)
if len(permaLights) > 8:
self._report.warn("More than 8 RT lamps on material: '{}'", bm.name, indent=1)
self._report.warn(f"More than 8 RT lamps on material: '{bm.name}'")
return (permaLights, permaProjs)

10
korman/korlib/python.py

@ -24,7 +24,7 @@ class PythonNotAvailableError(Exception):
pass
def compyle(file_name, py_code, py_version, report=None, indent=0):
def compyle(file_name, py_code, py_version, report=None):
# NOTE: Should never run under Python 2.x
my_version = sys.version_info[:2]
assert my_version == (2, 7) or my_version[0] > 2
@ -34,10 +34,10 @@ def compyle(file_name, py_code, py_version, report=None, indent=0):
if idx == -1:
module_name = file_name
else:
module_name = file_name[:idx]
module_name = file_name[:idx]
if report is not None:
report.msg("Compyling {}", file_name, indent=indent)
report.msg("Compyling {}", file_name)
if my_version != py_version:
import subprocess
@ -48,7 +48,7 @@ def compyle(file_name, py_code, py_version, report=None, indent=0):
py_code = py_code.encode("utf-8")
except UnicodeError:
if report is not None:
report.error("Could not encode '{}'", file_name, indent=indent+1)
report.error("Could not encode '{}'", file_name, indent=report.indent_level+1)
return (False, "Could not encode file")
result = subprocess.run(args, input=py_code, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
if result.returncode != 0:
@ -57,7 +57,7 @@ def compyle(file_name, py_code, py_version, report=None, indent=0):
except UnicodeError:
error = result.stdout
if report is not None:
report.error("Compylation Error in '{}'\n{}", file_name, error, indent=indent+1)
report.error("Compylation Error in '{}'\n{}", file_name, error, indent=report.indent_level+1)
return (result.returncode == 0, result.stdout)
else:
raise NotImplementedError()
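Since compyle() no longer accepts an indent parameter, nesting is now the caller's responsibility, while deeper error lines use the explicit indent=report.indent_level + 1 escape hatch shown above. A small usage illustration built on the sketch report class from the earlier note (the file name and messages here are made up):

report = _IndentingReport()
report.msg("Packaging Age scripts")
with report.indent():
    # compyle() would log its "Compyling ..." line at the current level...
    report.msg("Compyling {}", "xExample.py")
    # ...and report failures one level deeper via report.indent_level + 1.
    report.error("Compylation Error in '{}'\n{}", "xExample.py", "SyntaxError",
                 indent=report.indent_level + 1)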

4
korman/korlib/texture.py

@ -160,7 +160,7 @@ class GLTexture:
self._texkey.detail_opacity_start / 100.0,
self._texkey.detail_opacity_stop / 100.0)
def get_level_data(self, level=0, calc_alpha=False, report=None, indent=2, fast=False):
def get_level_data(self, level=0, calc_alpha=False, report=None, fast=False):
"""Gets the uncompressed pixel data for a requested mip level, optionally calculating the alpha
channel from the image color data
"""
@ -175,7 +175,7 @@ class GLTexture:
eHeight = ensure_power_of_two(oHeight) >> level
if report is not None:
report.msg("Level #{}: {}x{}", level, eWidth, eHeight, indent=indent)
report.msg("Level #{}: {}x{}", level, eWidth, eHeight)
# Scale, if needed...
if oWidth != eWidth or oHeight != eHeight:

4
korman/nodes/node_avatar.py

@ -70,7 +70,7 @@ class PlasmaSittingBehaviorNode(PlasmaNodeBase, bpy.types.Node):
if i is not None:
sitmod.addNotifyKey(i.get_key(exporter, so))
else:
exporter.report.warn("'{}' Node '{}' doesn't expose a key. It won't be triggered by '{}'!".format(i.bl_idname, i.name, self.name), indent=3)
exporter.report.warn(f"'{i.bl_idname}' Node '{i.name}' doesn't expose a key. It won't be triggered by '{self.name}'!")
@property
def requires_actor(self):
@ -401,7 +401,7 @@ class PlasmaMultiStageBehaviorNode(PlasmaNodeBase, bpy.types.Node):
msbmod.addReceiver(key)
else:
exporter.report.warn("'{}' Node '{}' doesn't expose a key. It won't be triggered by '{}'!",
node.bl_idname, node.name, self.name, indent=3)
node.bl_idname, node.name, self.name)
@property
def requires_actor(self):

11
korman/nodes/node_core.py

@ -28,7 +28,7 @@ class PlasmaNodeBase:
for i in self.find_outputs(socket_id, idname):
key = i.get_key(exporter, so)
if key is None:
exporter.report.warn(" '{}' Node '{}' doesn't expose a key. It won't be triggered by '{}'!".format(i.bl_idname, i.name, self.name), indent=3)
exporter.report.warn(f"'{i.bl_idname}' Node '{i.name}' doesn't expose a key. It won't be triggered by '{self.name}'!")
elif isinstance(key, tuple):
for i in key:
notify.addReceiver(key)
@ -498,10 +498,11 @@ class PlasmaNodeTree(bpy.types.NodeTree):
def export(self, exporter, bo, so):
exported_nodes = exporter.exported_nodes.setdefault(self.name, set())
for node in self.nodes:
if not (node.export_once and node.previously_exported(exporter)):
node.export(exporter, bo, so)
exported_nodes.add(node.name)
with exporter.report.indent():
for node in self.nodes:
if not (node.export_once and node.previously_exported(exporter)):
node.export(exporter, bo, so)
exported_nodes.add(node.name)
def find_output(self, idname):
for node in self.nodes:
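One practical benefit of the `with`-based approach used in the node tree export above: because the context manager restores the level in a finally block, a node that raises during export does not leave subsequent log output permanently indented. Illustrative only, again using the sketch report class; the node tree and node names are made up.

report = _IndentingReport()
report.msg("Exporting node tree 'Elevator Logic'")
try:
    with report.indent():
        report.msg("[PythonFileNode 'xElevator.py']")
        raise RuntimeError("simulated export failure")
except RuntimeError:
    pass
report.msg("Back at the top level")  # prints without indentation again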

13
korman/nodes/node_python.py

@ -279,12 +279,12 @@ class PlasmaPythonFileNode(PlasmaVersionedNode, bpy.types.Node):
# Check to see if we should pack this file
if exporter.output.want_py_text(self.text_id):
exporter.report.msg("Including Python '{}' for package", self.filename, indent=3)
exporter.report.msg("Including Python '{}' for package", self.filename)
exporter.output.add_python_mod(self.filename, text_id=self.text_id)
# PFMs can have their own SDL...
sdl_text = bpy.data.texts.get("{}.sdl".format(py_name), None)
if sdl_text is not None:
exporter.report.msg("Including corresponding SDL '{}'", sdl_text.name, indent=3)
exporter.report.msg("Including corresponding SDL '{}'", sdl_text.name)
exporter.output.add_sdl(sdl_text.name, text_id=sdl_text)
# Handle exporting the Python Parameters
@ -312,14 +312,13 @@ class PlasmaPythonFileNode(PlasmaVersionedNode, bpy.types.Node):
# an animated lamp.
if not bool(bo.users_group):
for light in exporter.mgr.find_interfaces(plLightInfo, so):
exporter.report.msg("Marking RT light '{}' as animated due to usage in a Python File node",
so.key.name, indent=3)
exporter.report.msg(f"Marking RT light '{so.key.name}' as animated due to usage in a Python File node", so.key.name)
light.setProperty(plLightInfo.kLPMovable, True)
def _export_key_attrib(self, exporter, bo, so : plSceneObject, key : plKey, socket) -> None:
if key is None:
exporter.report.warn("Attribute '{}' didn't return a key and therefore will be unavailable to Python",
self.id_data.name, socket.links[0].name, indent=3)
self.id_data.name, socket.links[0].name)
return
key_type = _attrib_key_types[socket.attribute_type]
@ -330,7 +329,7 @@ class PlasmaPythonFileNode(PlasmaVersionedNode, bpy.types.Node):
if not good_key:
exporter.report.warn("'{}' Node '{}' returned an unexpected key type '{}'",
self.id_data.name, socket.links[0].from_node.name,
plFactory.ClassName(key.type), indent=3)
plFactory.ClassName(key.type))
if isinstance(key.object, plSceneObject):
self._export_ancillary_sceneobject(exporter, bo, key.object)
@ -948,7 +947,7 @@ class PlasmaAttribTextureNode(idprops.IDPropMixin, PlasmaAttribNodeBase, bpy.typ
remainder = sum((1 for i in result))
if remainder > 1:
exporter.report.warn("'{}.{}': Expected a single layer, but mapped to {}. Make the settings more specific.",
self.id_data.name, self.path_from_id(), remainder + 1, indent=2)
self.id_data.name, self.path_from_id(), remainder + 1)
if result is not None:
yield result
else:

24
korman/operators/op_image.py

@ -124,10 +124,10 @@ class PlasmaBuildCubeMapOperator(ImageOperator, bpy.types.Operator):
face_path = filepath[:idx+1] + suffix + filepath[idx+3:]
face_name = key[:-4].upper()
if Path(face_path).is_file():
self._report.msg("Found face '{}': {}", face_name, face_path, indent=1)
self._report.msg("Found face '{}': {}", face_name, face_path)
files.append(face_path)
else:
self._report.warn("Using default face data for face '{}'", face_name, indent=1)
self._report.warn("Using default face data for face '{}'", face_name)
files.append(None)
self._report.progress_increment()
return tuple(files)
@ -226,14 +226,14 @@ class PlasmaBuildCubeMapOperator(ImageOperator, bpy.types.Operator):
# Insert grumbling here about tuples being immutable...
result_data = list(face_data)
for i in range(len(BLENDER_CUBE_MAP)):
face_width, face_height = face_widths[i], face_heights[i]
if face_width != min_width or face_height != min_height:
face_name = BLENDER_CUBE_MAP[i][:-4].upper()
self._report.msg("Resizing face '{}' from {}x{} to {}x{}", face_name,
face_width, face_height, min_width, min_height,
indent=1)
result_data[i] = scale_image(face_data[i], face_width, face_height,
min_width, min_height)
self._report.progress_increment()
with self._report.indent():
for i in range(len(BLENDER_CUBE_MAP)):
face_width, face_height = face_widths[i], face_heights[i]
if face_width != min_width or face_height != min_height:
face_name = BLENDER_CUBE_MAP[i][:-4].upper()
self._report.msg("Resizing face '{}' from {}x{} to {}x{}", face_name,
face_width, face_height, min_width, min_height)
result_data[i] = scale_image(face_data[i], face_width, face_height,
min_width, min_height)
self._report.progress_increment()
return min_width, min_height, tuple(result_data)

14
korman/properties/modifiers/anim.py

@ -70,7 +70,8 @@ class PlasmaAnimationModifier(ActionModifier, PlasmaModifierProperties):
def convert_object_animations(self, exporter, bo, so, anims: Optional[Iterable] = None):
if not anims:
anims = [self.subanimations.entire_animation]
aganims = list(self._export_ag_anims(exporter, bo, so, anims))
with exporter.report.indent():
aganims = list(self._export_ag_anims(exporter, bo, so, anims))
# Defer creation of the private animation until after the converter has been executed.
# Just because we have some FCurves doesn't mean they will produce anything particularly
@ -100,8 +101,7 @@ class PlasmaAnimationModifier(ActionModifier, PlasmaModifierProperties):
applicators = converter.convert_object_animations(bo, so, anim_name, start=start, end=end)
if not applicators:
exporter.report.warn("Animation '{}' generated no applicators. Nothing will be exported.",
anim_name, indent=2)
exporter.report.warn(f"Animation '{anim_name}' generated no applicators. Nothing will be exported.")
continue
pClass = plAgeGlobalAnim if anim.sdl_var else plATCAnim
@ -243,12 +243,12 @@ class PlasmaAnimationGroupModifier(ActionModifier, PlasmaModifierProperties):
continue
if not child_bo.plasma_object.has_animation_data:
msg = "Animation Group '{}' specifies an object '{}' with no valid animation data. Ignoring..."
exporter.report.warn(msg, self.key_name, child_bo.name, indent=2)
exporter.report.warn(msg, self.key_name, child_bo.name)
continue
child_animation = child_bo.plasma_modifiers.animation
if not child_animation.enabled:
msg = "Animation Group '{}' specifies an object '{}' with no Plasma Animation modifier. Ignoring..."
exporter.report.warn(msg, self.key_name, child_bo.name, indent=2)
exporter.report.warn(msg, self.key_name, child_bo.name)
continue
child_agmod, child_agmaster = exporter.animation.get_anigraph_objects(bo=child_bo)
msgfwd.addForwardKey(child_agmaster.key)
@ -294,10 +294,10 @@ class PlasmaAnimationLoopModifier(ActionModifier, PlasmaModifierProperties):
end = markers.get(loop.loop_end)
if start is None:
exporter.report.warn("Animation '{}' Loop '{}': Marker '{}' not found. This loop will not be exported".format(
action.name, loop.loop_name, loop.loop_start), indent=2)
action.name, loop.loop_name, loop.loop_start))
if end is None:
exporter.report.warn("Animation '{}' Loop '{}': Marker '{}' not found. This loop will not be exported".format(
action.name, loop.loop_name, loop.loop_end), indent=2)
action.name, loop.loop_name, loop.loop_end))
if start is None or end is None:
continue
atcanim.setLoop(loop.loop_name, _convert_frame_time(start.frame), _convert_frame_time(end.frame))

8
korman/properties/modifiers/gui.py

@ -126,11 +126,10 @@ class TranslationMixin:
def export_localization(self, exporter):
translations = [i for i in self.translations if i.text_id is not None]
if not translations:
exporter.report.error("'{}': '{}' No content translations available. The localization will not be exported.",
self.id_data.name, self.bl_label, indent=1)
exporter.report.error(f"'{self.id_data.name}': '{self.bl_label}' No content translations available. The localization will not be exported.")
return
for i in translations:
exporter.locman.add_string(self.localization_set, self.key_name, i.language, i.text_id, indent=1)
exporter.locman.add_string(self.localization_set, self.key_name, i.language, i.text_id)
def _get_translation(self):
# Ensure there is always a default (read: English) translation available.
@ -460,8 +459,7 @@ class PlasmaLinkingBookModifier(PlasmaModifierProperties, PlasmaModifierLogicWiz
def pre_export(self, exporter, bo):
if not self._check_version(exporter.mgr.getVer()):
# We aren't needed here
exporter.report.port("Object '{}' has a LinkingBookMod not enabled for export to the selected engine. Skipping.",
self.id_data.name, indent=2)
exporter.report.port(f"Object '{self.id_data.name}' has a LinkingBookMod not enabled for export to the selected engine. Skipping.")
return
# Auto-generate a six-foot cube region around the clickable if none was provided.

6
korman/properties/modifiers/physics.py

@ -174,9 +174,9 @@ class PlasmaSubworld(PlasmaModifierProperties):
# plCoordinateInterface::IGetRoot. Not really sure why this happens (nor do I care),
# but we definitely don't want it to happen.
if bo.type != "EMPTY":
exporter.report.warn("Subworld '{}' is attached to a '{}'--this should be an empty.", bo.name, bo.type, indent=1)
exporter.report.warn(f"Subworld '{bo.name}' is attached to a '{bo.type}'--this should be an empty.")
if so.sim:
if exporter.mgr.getVer() > pvPots:
exporter.report.port("Subworld '{}' has physics data--this will cause PotS to crash.", bo.name, indent=1)
exporter.report.port(f"Subworld '{bo.name}' has physics data--this will cause PotS to crash.")
else:
raise ExportError("Subworld '{}' cannot have physics data (should be an empty).".format(bo.name))
raise ExportError(f"Subworld '{bo.name}' cannot have physics data (should be an empty).")

3
korman/properties/modifiers/region.py

@ -340,8 +340,7 @@ class PlasmaSubworldRegion(PlasmaModifierProperties):
from_name, from_type = get_log_text(from_sub, from_isded)
to_name, to_type = get_log_text(to_sub, to_isded)
exporter.report.msg("Transition from '{}' ({}) to '{}' ({})",
from_name, from_type, to_name, to_type,
indent=2)
from_name, from_type, to_name, to_type)
# I think the best solution here is to not worry about the excitement mentioned above.
# If we encounter anything truly interesting, we can fix it in CWE more easily IMO because

17
korman/properties/modifiers/render.py

@ -377,19 +377,17 @@ class PlasmaGrassShaderMod(PlasmaModifierProperties):
def export(self, exporter, bo, so):
if exporter.mgr.getVer() <= pvPots:
exporter.report.warn("Not supported on this version of Plasma", indent=3)
exporter.report.warn("Not supported on this version of Plasma")
return
else:
exporter.report.port("This will only function on MOUL and EOA", indent=3)
exporter.report.port("This will only function on MOUL and EOA")
materials = exporter.mesh.material.get_materials(bo)
if not materials:
exporter.report.warn("No materials are associated with this object, no grass shader exported!",
indent=3)
exporter.report.warn("No materials are associated with this object, no grass shader exported!")
return
elif len(materials) > 1:
exporter.report.warn("Ah, a multiple material grass shader, eh. You like living dangerously...",
indent=3)
exporter.report.warn("Ah, a multiple material grass shader, eh. You like living dangerously...")
for material in materials:
mod = exporter.mgr.find_create_object(plGrassShaderMod, so=so, name=material.name)
@ -511,8 +509,7 @@ class PlasmaLightMapGen(idprops.IDPropMixin, PlasmaModifierProperties, PlasmaMod
mat_mgr.export_prepared_image(owner=layer, image=lightmap_im,
allowed_formats={"PNG", "JPG"},
extension="hsm",
ephemeral=True,
indent=2)
ephemeral=True)
@classmethod
def _idprop_mapping(cls):
@ -914,14 +911,14 @@ class PlasmaVisControl(idprops.IDPropObjectMixin, PlasmaModifierProperties):
else:
this_sv = bo.plasma_modifiers.softvolume
if this_sv.enabled:
exporter.report.msg("[VisRegion] I'm a SoftVolume myself :)", indent=1)
exporter.report.msg("[VisRegion] I'm a SoftVolume myself :)")
rgn.region = this_sv.get_key(exporter, so)
else:
if not self.soft_region:
raise ExportError("'{}': Visibility Control must have a Soft Volume selected".format(self.key_name))
sv_bo = self.soft_region
sv = sv_bo.plasma_modifiers.softvolume
exporter.report.msg("[VisRegion] SoftVolume '{}'", sv_bo.name, indent=1)
exporter.report.msg("[VisRegion] SoftVolume '{}'", sv_bo.name)
if not sv.enabled:
raise ExportError("'{}': '{}' is not a SoftVolume".format(self.key_name, sv_bo.name))
rgn.region = sv.get_key(exporter)

6
korman/properties/modifiers/sound.py

@ -140,10 +140,10 @@ class PlasmaRandomSound(PlasmaModifierProperties):
surface_id = getattr(plPhysicalSndGroup, surface_name)
if surface_id in sounds:
exporter.report.warn("Overwriting physical {} surface '{}' ID:{}",
groupattr, surface_name, surface_id, indent=2)
groupattr, surface_name, surface_id)
else:
exporter.report.msg("Got physical {} surface '{}' ID:{}",
groupattr, surface_name, surface_id, indent=2)
groupattr, surface_name, surface_id)
sounds[surface_id] = rndmod
# Keeps the LUT (or should that be lookup vector?) as small as possible
setattr(sndgroup, groupattr, [sounds.get(i) for i in range(max(sounds.keys()) + 1)])
@ -345,7 +345,7 @@ class PlasmaSound(idprops.IDPropMixin, bpy.types.PropertyGroup):
name = "Sfx-{}_{}".format(so.key.name, self._sound_name)
else:
name = "Sfx-{}_{}:{}".format(so.key.name, self._sound_name, channel)
exporter.report.msg("[{}] {}", pClass.__name__[2:], name, indent=1)
exporter.report.msg("[{}] {}", pClass.__name__[2:], name)
sound = exporter.mgr.find_create_object(pClass, so=so, name=name)
# If this object is a soft volume itself, we will use our own soft region.

2
korman/properties/modifiers/water.py

@ -156,7 +156,7 @@ class PlasmaSwimRegion(idprops.IDPropObjectMixin, PlasmaModifierProperties, bpy.
# swimming surface should have a detector. m'kay? But still, we might want to make note
# of this sitation. Just in case someone is like "WTF! Why am I not swimming?!?!1111111"
# Because you need to have a detector, dummy.
exporter.report.warn("Swimming Surface '{}' does not specify a detector region".format(bo.name), indent=2)
exporter.report.warn(f"Swimming Surface '{bo.name}' does not specify a detector region")
def get_key(self, exporter, so=None):
pClass = self._CURRENTS[self.current_type]
