
Simplify log indentation management.

Previously, the indentation level was hardcoded everywhere. This was
tedious in that changing the log structure required changing many
manual indentation values. Now that objects can be trivially generated
at export time, the export code may be much more deeply nested than
before, so it's better to let indentation be more implicit. This commit
therefore adds a context manager that increases the indentation level
for the duration of a `with` block. Manual indentation specification
remains available for the cases that still require it.
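
A minimal sketch of the new pattern, using the `report.msg` / `report.indent` calls
that appear in the diff below (the surrounding exporter variables are illustrative only):

    log_msg = self.report.msg
    indent = self.report.indent

    log_msg("[SceneObject '{}']".format(bl_obj.name))
    with indent():
        # Everything logged inside the with block is indented one extra level.
        log_msg("Blender Object '{}' of type '{}'".format(bl_obj.name, bl_obj.type))
        with indent():
            # Nesting stacks, and the previous indentation is restored when each
            # block exits, even if an exception is raised.
            log_msg("Exporting '{}' modifier".format(mod.bl_label))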
pull/369/head
Adam Johnson authored 1 year ago
commit e5eba455f3
Signed by: Hoikas (GPG Key ID: 0B6515D6FF6F271E)
  1. korman/exporter/animation.py (17)
  2. korman/exporter/camera.py (2)
  3. korman/exporter/convert.py (157)
  4. korman/exporter/decal.py (2)
  5. korman/exporter/etlight.py (65)
  6. korman/exporter/image.py (2)
  7. korman/exporter/locman.py (57)
  8. korman/exporter/logger.py (57)
  9. korman/exporter/material.py (497)
  10. korman/exporter/mesh.py (31)
  11. korman/exporter/outfile.py (8)
  12. korman/exporter/physics.py (6)
  13. korman/exporter/python.py (8)
  14. korman/exporter/rtlight.py (106)
  15. korman/korlib/python.py (10)
  16. korman/korlib/texture.py (4)
  17. korman/nodes/node_avatar.py (4)
  18. korman/nodes/node_core.py (11)
  19. korman/nodes/node_python.py (13)
  20. korman/operators/op_image.py (24)
  21. korman/properties/modifiers/anim.py (14)
  22. korman/properties/modifiers/gui.py (8)
  23. korman/properties/modifiers/physics.py (6)
  24. korman/properties/modifiers/region.py (3)
  25. korman/properties/modifiers/render.py (17)
  26. korman/properties/modifiers/sound.py (6)
  27. korman/properties/modifiers/water.py (2)

17
korman/exporter/animation.py

@ -158,10 +158,10 @@ class AnimationConverter:
if energy_curve is None and color_curves is None: if energy_curve is None and color_curves is None:
return None return None
elif lamp.use_only_shadow: elif lamp.use_only_shadow:
self._exporter().report.warn("Cannot animate Lamp color because this lamp only casts shadows", indent=3) self._exporter().report.warn("Cannot animate Lamp color because this lamp only casts shadows")
return None return None
elif not lamp.use_specular and not lamp.use_diffuse: elif not lamp.use_specular and not lamp.use_diffuse:
self._exporter().report.warn("Cannot animate Lamp color because neither Diffuse nor Specular are enabled", indent=3) self._exporter().report.warn("Cannot animate Lamp color because neither Diffuse nor Specular are enabled")
return None return None
# OK Specular is easy. We just toss out the color as a point3. # OK Specular is easy. We just toss out the color as a point3.
@ -251,7 +251,7 @@ class AnimationConverter:
yield applicator yield applicator
elif falloff == "INVERSE_SQUARE": elif falloff == "INVERSE_SQUARE":
if self._mgr.getVer() >= pvMoul: if self._mgr.getVer() >= pvMoul:
report.port("Lamp {} Falloff animations are only supported in Myst Online: Uru Live", falloff, indent=3) report.port(f"Lamp {falloff} Falloff animations are only supported in Myst Online: Uru Live")
keyframes = self._process_fcurves(omni_fcurves, omni_channels, 1, convert_omni_atten, keyframes = self._process_fcurves(omni_fcurves, omni_channels, 1, convert_omni_atten,
omni_defaults, start=start, end=end) omni_defaults, start=start, end=end)
if keyframes: if keyframes:
@ -262,7 +262,7 @@ class AnimationConverter:
applicator.channel = channel applicator.channel = channel
yield applicator yield applicator
else: else:
report.warn("Lamp {} Falloff animations are not supported for this version of Plasma", falloff, indent=3) report.warn(f"Lamp {falloff} Falloff animations are not supported for this version of Plasma")
else: else:
report.warn("Lamp Falloff '{}' animations are not supported", falloff, ident=3) report.warn("Lamp Falloff '{}' animations are not supported", falloff, ident=3)
@ -294,8 +294,7 @@ class AnimationConverter:
applicator.channel = channel applicator.channel = channel
yield applicator yield applicator
else: else:
self._exporter().report.warn("[{}]: Volume animation evaluated to zero keyframes!", self._exporter().report.warn(f"[{sound.sound.name}]: Volume animation evaluated to zero keyframes!")
sound.sound.name, indent=2)
break break
def _convert_spot_lamp_animation(self, name, fcurves, lamp, start, end): def _convert_spot_lamp_animation(self, name, fcurves, lamp, start, end):
@ -341,8 +340,10 @@ class AnimationConverter:
def _convert_transform_animation(self, bo, fcurves, default_xform, adjust_xform, *, allow_empty: Optional[bool] = False, def _convert_transform_animation(self, bo, fcurves, default_xform, adjust_xform, *, allow_empty: Optional[bool] = False,
start: Optional[int] = None, end: Optional[int] = None) -> Optional[plMatrixChannelApplicator]: start: Optional[int] = None, end: Optional[int] = None) -> Optional[plMatrixChannelApplicator]:
if adjust_xform != mathutils.Matrix.Identity(4): if adjust_xform != mathutils.Matrix.Identity(4):
self._exporter().report.warn(("{}: Transform animation is not local and may export incorrectly. " + self._exporter().report.warn(
"Please use Alt-P -> Clear Parent Inverse before animating objects to avoid issues.").format(bo.name), indent=1) f"'{bo.name}': Transform animation is not local and may export incorrectly. "
"Please use Alt-P -> Clear Parent Inverse before animating objects to avoid issues."
)
else: else:
# Adjustment matrix is identity, just pass None instead... # Adjustment matrix is identity, just pass None instead...
adjust_xform = None adjust_xform = None

2
korman/exporter/camera.py

@ -123,7 +123,7 @@ class CameraConverter:
elif props.poa_type == "object": elif props.poa_type == "object":
brain.poaObject = self._mgr.find_create_key(plSceneObject, bl=props.poa_object) brain.poaObject = self._mgr.find_create_key(plSceneObject, bl=props.poa_object)
else: else:
self._report.warn("Circle Camera '{}' has no Point of Attention. Is this intended?", bo.name, indent=3) self._report.warn(f"Circle Camera '{bo.name}' has no Point of Attention. Is this intended?")
if props.circle_pos == "farthest": if props.circle_pos == "farthest":
brain.circleFlags |= plCameraBrain1_Circle.kFarthest brain.circleFlags |= plCameraBrain1_Circle.kFarthest

157
korman/exporter/convert.py

@ -219,12 +219,13 @@ class Exporter:
inc_progress = self.report.progress_increment inc_progress = self.report.progress_increment
self.report.msg("\nEnsuring Age is sane...") self.report.msg("\nEnsuring Age is sane...")
for bl_obj in self._objects: with self.report.indent():
for mod in bl_obj.plasma_modifiers.modifiers: for bl_obj in self._objects:
fn = getattr(mod, "sanity_check", None) for mod in bl_obj.plasma_modifiers.modifiers:
if fn is not None: fn = getattr(mod, "sanity_check", None)
fn() if fn is not None:
inc_progress() fn()
inc_progress()
self.report.msg("... Age is grinning and holding a spatula. Must be OK, then.") self.report.msg("... Age is grinning and holding a spatula. Must be OK, then.")
def _export_age_info(self): def _export_age_info(self):
@ -254,7 +255,7 @@ class Exporter:
parent = bo.parent parent = bo.parent
if parent is not None: if parent is not None:
if parent.plasma_object.enabled: if parent.plasma_object.enabled:
self.report.msg("Attaching to parent SceneObject '{}'", parent.name, indent=1) self.report.msg(f"Attaching to parent SceneObject '{parent.name}'")
parent_ci = self._export_coordinate_interface(None, parent) parent_ci = self._export_coordinate_interface(None, parent)
parent_ci.addChild(so.key) parent_ci.addChild(so.key)
else: else:
@ -285,42 +286,47 @@ class Exporter:
self.report.msg("\nExporting localization...") self.report.msg("\nExporting localization...")
for bl_obj in self._objects: with self.report.indent():
for mod in filter(lambda x: hasattr(x, "export_localization"), bl_obj.plasma_modifiers.modifiers): for bl_obj in self._objects:
mod.export_localization(self) for mod in filter(lambda x: hasattr(x, "export_localization"), bl_obj.plasma_modifiers.modifiers):
inc_progress() mod.export_localization(self)
inc_progress()
def _export_scene_objects(self): def _export_scene_objects(self):
self.report.progress_advance() self.report.progress_advance()
self.report.progress_range = len(self._objects) self.report.progress_range = len(self._objects)
inc_progress = self.report.progress_increment inc_progress = self.report.progress_increment
log_msg = self.report.msg log_msg = self.report.msg
indent = self.report.indent
for bl_obj in self._objects: for bl_obj in self._objects:
log_msg("\n[SceneObject '{}']".format(bl_obj.name)) log_msg(f"\n[SceneObject '{bl_obj.name}']")
# First pass: do things specific to this object type. with indent():
# note the function calls: to export a MESH, it's _export_mesh_blobj # First pass: do things specific to this object type.
export_fn = "_export_{}_blobj".format(bl_obj.type.lower()) # note the function calls: to export a MESH, it's _export_mesh_blobj
try: export_fn = "_export_{}_blobj".format(bl_obj.type.lower())
export_fn = getattr(self, export_fn) try:
except AttributeError: export_fn = getattr(self, export_fn)
self.report.warn("""'{}' is a Plasma Object of Blender type '{}' except AttributeError:
... And I have NO IDEA what to do with that! Tossing.""".format(bl_obj.name, bl_obj.type)) self.report.warn("""'{}' is a Plasma Object of Blender type '{}'
continue ... And I have NO IDEA what to do with that! Tossing.""".format(bl_obj.name, bl_obj.type))
log_msg("Blender Object '{}' of type '{}'".format(bl_obj.name, bl_obj.type), indent=1) continue
log_msg(f"Blender Object '{bl_obj.name}' of type '{bl_obj.type}'")
# Create a sceneobject if one does not exist.
# Before we call the export_fn, we need to determine if this object is an actor of any # Create a sceneobject if one does not exist.
# sort, and barf out a CI. # Before we call the export_fn, we need to determine if this object is an actor of any
sceneobject = self.mgr.find_create_object(plSceneObject, bl=bl_obj) # sort, and barf out a CI.
self._export_actor(sceneobject, bl_obj) sceneobject = self.mgr.find_create_object(plSceneObject, bl=bl_obj)
export_fn(sceneobject, bl_obj) self._export_actor(sceneobject, bl_obj)
with indent():
# And now we puke out the modifiers... export_fn(sceneobject, bl_obj)
for mod in bl_obj.plasma_modifiers.modifiers:
log_msg("Exporting '{}' modifier".format(mod.bl_label), indent=1) # And now we puke out the modifiers...
mod.export(self, bl_obj, sceneobject) for mod in bl_obj.plasma_modifiers.modifiers:
log_msg(f"Exporting '{mod.bl_label}' modifier")
with indent():
mod.export(self, bl_obj, sceneobject)
inc_progress() inc_progress()
def _export_camera_blobj(self, so, bo): def _export_camera_blobj(self, so, bo):
@ -338,27 +344,31 @@ class Exporter:
if bo.data.materials: if bo.data.materials:
self.mesh.export_object(bo, so) self.mesh.export_object(bo, so)
else: else:
self.report.msg("No material(s) on the ObData, so no drawables", indent=1) self.report.msg("No material(s) on the ObData, so no drawables")
def _export_font_blobj(self, so, bo): def _export_font_blobj(self, so, bo):
with utils.temporary_mesh_object(bo) as meshObj: with utils.temporary_mesh_object(bo) as meshObj:
if bo.data.materials: if bo.data.materials:
self.mesh.export_object(meshObj, so) self.mesh.export_object(meshObj, so)
else: else:
self.report.msg("No material(s) on the ObData, so no drawables", indent=1) self.report.msg("No material(s) on the ObData, so no drawables")
def _export_referenced_node_trees(self): def _export_referenced_node_trees(self):
self.report.progress_advance() self.report.progress_advance()
self.report.progress_range = len(self.want_node_trees) self.report.progress_range = len(self.want_node_trees)
inc_progress = self.report.progress_increment inc_progress = self.report.progress_increment
log_msg = self.report.msg
self.report.msg("\nChecking Logic Trees...") indent = self.report.indent
for tree_name, references in self.want_node_trees.items():
self.report.msg("NodeTree '{}'", tree_name, indent=1) log_msg("\nChecking Logic Trees...")
tree = bpy.data.node_groups[tree_name] with indent():
for bo, so in references: for tree_name, references in self.want_node_trees.items():
tree.export(self, bo, so) log_msg(f"NodeTree '{tree_name}'")
inc_progress() with indent():
tree = bpy.data.node_groups[tree_name]
for bo, so in references:
tree.export(self, bo, so)
inc_progress()
def _harvest_actors(self): def _harvest_actors(self):
self.report.progress_advance() self.report.progress_advance()
@ -401,8 +411,10 @@ class Exporter:
self.report.progress_advance() self.report.progress_advance()
self.report.progress_range = len(self._objects) self.report.progress_range = len(self._objects)
inc_progress = self.report.progress_increment inc_progress = self.report.progress_increment
self.report.msg("\nPost-Processing SceneObjects...") log_msg = self.report.msg
indent = self.report.indent
log_msg("\nPost-Processing SceneObjects...")
mat_mgr = self.mesh.material mat_mgr = self.mesh.material
for bl_obj in self._objects: for bl_obj in self._objects:
sceneobject = self.mgr.find_object(plSceneObject, bl=bl_obj) sceneobject = self.mgr.find_object(plSceneObject, bl=bl_obj)
@ -421,18 +433,22 @@ class Exporter:
net.propagate_synch_options(sceneobject, layer) net.propagate_synch_options(sceneobject, layer)
# Modifiers don't have to expose post-processing, but if they do, run it # Modifiers don't have to expose post-processing, but if they do, run it
for mod in bl_obj.plasma_modifiers.modifiers: with indent():
proc = getattr(mod, "post_export", None) for mod in bl_obj.plasma_modifiers.modifiers:
if proc is not None: proc = getattr(mod, "post_export", None)
self.report.msg("Post processing '{}' modifier '{}'", bl_obj.name, mod.bl_label, indent=1) if proc is not None:
proc(self, bl_obj, sceneobject) self.report.msg(f"Post processing '{bl_obj.name}' modifier '{mod.bl_label}'")
with indent():
proc(self, bl_obj, sceneobject)
inc_progress() inc_progress()
def _pre_export_scene_objects(self): def _pre_export_scene_objects(self):
self.report.progress_advance() self.report.progress_advance()
self.report.progress_range = len(self._objects) self.report.progress_range = len(self._objects)
inc_progress = self.report.progress_increment inc_progress = self.report.progress_increment
self.report.msg("\nGenerating export dependency objects...") log_msg = self.report.msg
indent = self.report.indent
log_msg("\nGenerating export dependency objects...")
# New objects may be generate during this process; they will be appended at the end. # New objects may be generate during this process; they will be appended at the end.
new_objects = [] new_objects = []
@ -450,8 +466,10 @@ class Exporter:
@handle_temporary.register(bpy.types.Object) @handle_temporary.register(bpy.types.Object)
def _(temporary, parent): def _(temporary, parent):
self.exit_stack.enter_context(TemporaryObject(temporary, bpy.data.objects.remove)) self.exit_stack.enter_context(TemporaryObject(temporary, bpy.data.objects.remove))
self.report.msg("'{}': generated Object '{}' (Plasma Object: {})", parent.name, log_msg(
temporary.name, temporary.plasma_object.enabled, indent=1) f"'{parent.name}': generated Object '{temporary.name}' "
f"(Plasma Object: {temporary.plasma_object.enabled})",
)
if temporary.plasma_object.enabled: if temporary.plasma_object.enabled:
new_objects.append(temporary) new_objects.append(temporary)
@ -461,14 +479,15 @@ class Exporter:
temporary.plasma_object.page = parent.plasma_object.page temporary.plasma_object.page = parent.plasma_object.page
# Wow, recursively generated objects. Aren't you special? # Wow, recursively generated objects. Aren't you special?
for mod in temporary.plasma_modifiers.modifiers: with indent():
mod.sanity_check() for mod in temporary.plasma_modifiers.modifiers:
do_pre_export(temporary) mod.sanity_check()
do_pre_export(temporary)
@handle_temporary.register(bpy.types.NodeTree) @handle_temporary.register(bpy.types.NodeTree)
def _(temporary, parent): def _(temporary, parent):
self.exit_stack.enter_context(TemporaryObject(temporary, bpy.data.node_groups.remove)) self.exit_stack.enter_context(TemporaryObject(temporary, bpy.data.node_groups.remove))
self.report.msg("'{}' generated NodeTree '{}'", parent.name, temporary.name) log_msg(f"'{parent.name}' generated NodeTree '{temporary.name}'")
if temporary.bl_idname == "PlasmaNodeTree": if temporary.bl_idname == "PlasmaNodeTree":
parent_so = self.mgr.find_create_object(plSceneObject, bl=parent) parent_so = self.mgr.find_create_object(plSceneObject, bl=parent)
self.want_node_trees[temporary.name].add((parent, parent_so)) self.want_node_trees[temporary.name].add((parent, parent_so))
@ -482,11 +501,12 @@ class Exporter:
for i in filter(None, result): for i in filter(None, result):
handle_temporary(i, bo) handle_temporary(i, bo)
for bl_obj in self._objects: with indent():
do_pre_export(bl_obj) for bl_obj in self._objects:
inc_progress() do_pre_export(bl_obj)
inc_progress()
self.report.msg("... {} new object(s) were generated!", len(new_objects)) log_msg(f"... {len(new_objects)} new object(s) were generated!")
self._objects += new_objects self._objects += new_objects
def _pack_ancillary_python(self): def _pack_ancillary_python(self):
@ -506,12 +526,13 @@ class Exporter:
# If something bad happens in the final flush, it would be a shame to # If something bad happens in the final flush, it would be a shame to
# simply toss away the potentially freshly regenerated texture cache. # simply toss away the potentially freshly regenerated texture cache.
try: with self.report.indent():
self.locman.save() try:
self.mgr.save_age() self.locman.save()
self.output.save() self.mgr.save_age()
finally: self.output.save()
self.image.save() finally:
self.image.save()
@property @property
def age_name(self): def age_name(self):

2
korman/exporter/decal.py

@ -112,7 +112,7 @@ class DecalConverter:
name = "{}_{}".format(decal_name, bo.name) if is_waveset else decal_name name = "{}_{}".format(decal_name, bo.name) if is_waveset else decal_name
decal_mgr = exporter.mgr.find_object(pClass, bl=bo, name=name) decal_mgr = exporter.mgr.find_object(pClass, bl=bo, name=name)
if decal_mgr is None: if decal_mgr is None:
self._report.msg("Exporing decal manager '{}' to '{}'", decal_name, name, indent=2) self._report.msg(f"Exporing decal manager '{decal_name}' to '{name}'")
decal_mgr = exporter.mgr.add_object(pClass, bl=bo, name=name) decal_mgr = exporter.mgr.add_object(pClass, bl=bo, name=name)
self._decal_managers[decal_name].append(decal_mgr.key) self._decal_managers[decal_name].append(decal_mgr.key)

65
korman/exporter/etlight.py

@ -112,7 +112,7 @@ class LightBaker:
self._report.msg("\nBaking Static Lighting...") self._report.msg("\nBaking Static Lighting...")
with GoodNeighbor() as toggle: with GoodNeighbor() as toggle, self._report.indent():
try: try:
# reduce the amount of indentation # reduce the amount of indentation
bake = self._harvest_bakable_objects(objs, toggle) bake = self._harvest_bakable_objects(objs, toggle)
@ -143,26 +143,25 @@ class LightBaker:
# Step 1: Prepare... Apply UVs, etc, etc, etc # Step 1: Prepare... Apply UVs, etc, etc, etc
self._report.progress_advance() self._report.progress_advance()
self._report.progress_range = len(bake) self._report.progress_range = len(bake)
self._report.msg("Preparing to bake...", indent=1) self._report.msg("Preparing to bake...")
for key, value in bake.items(): with self._report.indent():
if key[0] == "lightmap": for key, value in bake.items():
for i in range(len(value)-1, -1, -1): if key[0] == "lightmap":
obj = value[i] for i in range(len(value)-1, -1, -1):
if not self._prep_for_lightmap(obj, toggle): obj = value[i]
self._report.msg("Lightmap '{}' will not be baked -- no applicable lights", if not self._prep_for_lightmap(obj, toggle):
obj.name, indent=2) self._report.msg(f"Lightmap '{obj.name}' will not be baked -- no applicable lights")
value.pop(i) value.pop(i)
elif key[0] == "vcol": elif key[0] == "vcol":
for i in range(len(value)-1, -1, -1): for i in range(len(value)-1, -1, -1):
obj = value[i] obj = value[i]
if not self._prep_for_vcols(obj, toggle): if not self._prep_for_vcols(obj, toggle):
if self._has_valid_material(obj): if self._has_valid_material(obj):
self._report.msg("VCols '{}' will not be baked -- no applicable lights", self._report.msg(f"VCols '{obj.name}' will not be baked -- no applicable lights")
obj.name, indent=2) value.pop(i)
value.pop(i) else:
else: raise RuntimeError(key[0])
raise RuntimeError(key[0]) inc_progress()
inc_progress()
self._report.msg(" ...") self._report.msg(" ...")
# Step 2: BAKE! # Step 2: BAKE!
@ -172,14 +171,15 @@ class LightBaker:
if value: if value:
if key[0] == "lightmap": if key[0] == "lightmap":
num_objs = len(value) num_objs = len(value)
self._report.msg("{} Lightmap(s) [H:{:X}]", num_objs, hash(key[1:]), indent=1) self._report.msg("{} Lightmap(s) [H:{:X}]", num_objs, hash(key[1:]))
if largest_pass > 1 and num_objs < round(largest_pass * 0.02): if largest_pass > 1 and num_objs < round(largest_pass * 0.02):
pass_names = set((i.plasma_modifiers.lightmap.bake_pass_name for i in value)) pass_names = set((i.plasma_modifiers.lightmap.bake_pass_name for i in value))
pass_msg = ", ".join(pass_names) pass_msg = ", ".join(pass_names)
self._report.warn("Small lightmap bake pass! Bake Pass(es): {}".format(pass_msg), indent=2) with self._report.indent():
self._report.warn(f"Small lightmap bake pass! Bake Pass(es): {pass_msg}")
self._bake_lightmaps(value, key[1:]) self._bake_lightmaps(value, key[1:])
elif key[0] == "vcol": elif key[0] == "vcol":
self._report.msg("{} Vertex Color(s) [H:{:X}]", len(value), hash(key[1:]), indent=1) self._report.msg("{} Vertex Color(s) [H:{:X}]", len(value), hash(key[1:]))
self._bake_vcols(value, key[1:]) self._bake_vcols(value, key[1:])
self._fix_vertex_colors(value) self._fix_vertex_colors(value)
else: else:
@ -327,9 +327,9 @@ class LightBaker:
if mod.image is not None: if mod.image is not None:
uv_texture_names = frozenset((i.name for i in obj.data.uv_textures)) uv_texture_names = frozenset((i.name for i in obj.data.uv_textures))
if self.lightmap_uvtex_name in uv_texture_names: if self.lightmap_uvtex_name in uv_texture_names:
self._report.msg("'{}': Skipping due to valid lightmap override", obj.name, indent=1) self._report.msg("'{}': Skipping due to valid lightmap override", obj.name)
else: else:
self._report.warn("'{}': Have lightmap, but regenerating UVs", obj.name, indent=1) self._report.warn("'{}': Have lightmap, but regenerating UVs", obj.name)
self._prep_for_lightmap_uvs(obj, mod.image, toggle) self._prep_for_lightmap_uvs(obj, mod.image, toggle)
return False return False
return True return True
@ -341,12 +341,12 @@ class LightBaker:
vcol_layer_names = frozenset((vcol_layer.name.lower() for vcol_layer in obj.data.vertex_colors)) vcol_layer_names = frozenset((vcol_layer.name.lower() for vcol_layer in obj.data.vertex_colors))
manual_layer_names = _VERTEX_COLOR_LAYERS & vcol_layer_names manual_layer_names = _VERTEX_COLOR_LAYERS & vcol_layer_names
if manual_layer_names: if manual_layer_names:
self._report.msg("'{}': Skipping due to valid manual vertex color layer(s): '{}'", obj.name, manual_layer_names.pop(), indent=1) self._report.msg("'{}': Skipping due to valid manual vertex color layer(s): '{}'", obj.name, manual_layer_names.pop())
return False return False
if self.force: if self.force:
return True return True
if self.vcol_layer_name.lower() in vcol_layer_names: if self.vcol_layer_name.lower() in vcol_layer_names:
self._report.msg("'{}': Skipping due to valid matching vertex color layer(s): '{}'", obj.name, self.vcol_layer_name, indent=1) self._report.msg("'{}': Skipping due to valid matching vertex color layer(s): '{}'", obj.name, self.vcol_layer_name)
return False return False
return True return True
@ -377,9 +377,9 @@ class LightBaker:
key = (method,) + lm_layers key = (method,) + lm_layers
bake_pass = bake.setdefault(key, []) bake_pass = bake.setdefault(key, [])
bake_pass.append(i) bake_pass.append(i)
self._report.msg("'{}': Bake to {}", i.name, method, indent=1) self._report.msg("'{}': Bake to {}", i.name, method)
elif mods.lighting.preshade and vcol_bake_required(i): elif mods.lighting.preshade and vcol_bake_required(i):
self._report.msg("'{}': Bake to vcol (crappy)", i.name, indent=1) self._report.msg("'{}': Bake to vcol (crappy)", i.name)
bake_vcol.append(i) bake_vcol.append(i)
return bake return bake
@ -435,7 +435,8 @@ class LightBaker:
im = data_images.new(im_name, width=size, height=size) im = data_images.new(im_name, width=size, height=size)
self._lightmap_images[bo.name] = im self._lightmap_images[bo.name] = im
self._prep_for_lightmap_uvs(bo, im, toggle) with self._report.indent():
self._prep_for_lightmap_uvs(bo, im, toggle)
# Now, set the new LIGHTMAPGEN uv layer as what we want to render to... # Now, set the new LIGHTMAPGEN uv layer as what we want to render to...
# NOTE that this will need to be reset by us to what the user had previously # NOTE that this will need to be reset by us to what the user had previously
@ -492,7 +493,7 @@ class LightBaker:
if self._mesh.is_collapsed(bo): if self._mesh.is_collapsed(bo):
# Danger: uv_base.name -> UnicodeDecodeError (wtf? another blender bug?) # Danger: uv_base.name -> UnicodeDecodeError (wtf? another blender bug?)
self._report.warn("'{}': packing islands in UV Texture '{}' due to modifier collapse", self._report.warn("'{}': packing islands in UV Texture '{}' due to modifier collapse",
bo.name, modifier.uv_map, indent=2) bo.name, modifier.uv_map)
with self._set_mode("EDIT"): with self._set_mode("EDIT"):
bpy.ops.mesh.select_all(action="SELECT") bpy.ops.mesh.select_all(action="SELECT")
bpy.ops.uv.select_all(action="SELECT") bpy.ops.uv.select_all(action="SELECT")

2
korman/exporter/image.py

@ -160,7 +160,7 @@ class ImageCache:
try: try:
cached_image.image_data = tuple(self._read_image_data(cached_image, self._read_stream)) cached_image.image_data = tuple(self._read_image_data(cached_image, self._read_stream))
except AssertionError: except AssertionError:
self._report.warn("Cached copy of '{}' is corrupt and will be discarded", cached_image.name, indent=2) self._report.warn(f"Cached copy of '{cached_image.name}' is corrupt and will be discarded")
self._images.pop(key) self._images.pop(key)
return None return None
return cached_image return cached_image

57
korman/exporter/locman.py

@ -69,12 +69,12 @@ class LocalizationConverter:
self._version = kwargs.get("version") self._version = kwargs.get("version")
self._strings = defaultdict(lambda: defaultdict(dict)) self._strings = defaultdict(lambda: defaultdict(dict))
def add_string(self, set_name, element_name, language, value, indent=0): def add_string(self, set_name, element_name, language, value):
self._report.msg("Accepted '{}' translation for '{}'.", element_name, language, indent=indent) self._report.msg("Accepted '{}' translation for '{}'.", element_name, language)
if isinstance(value, bpy.types.Text): if isinstance(value, bpy.types.Text):
if value.is_modified: if value.is_modified:
self._report.warn("'{}' translation for '{}' is modified on the disk but not reloaded in Blender.", self._report.warn("'{}' translation for '{}' is modified on the disk but not reloaded in Blender.",
element_name, language, indent=indent) element_name, language)
value = value.as_string() value = value.as_string()
for dc in _DUMB_CHARACTERS: for dc in _DUMB_CHARACTERS:
@ -86,7 +86,7 @@ class LocalizationConverter:
if value != old_value: if value != old_value:
self._report.warn( self._report.warn(
"'{}' translation for '{}' has an illegal {}, which was replaced with: {}", "'{}' translation for '{}' has an illegal {}, which was replaced with: {}",
element_name, language, dc.desc, dc.sub, indent=indent element_name, language, dc.desc, dc.sub
) )
self._strings[set_name][element_name][language] = value self._strings[set_name][element_name][language] = value
@ -116,7 +116,7 @@ class LocalizationConverter:
stream.write(contents.encode("windows-1252")) stream.write(contents.encode("windows-1252"))
except UnicodeEncodeError: except UnicodeEncodeError:
self._report.warn("Translation '{}': Contents contains characters that cannot be used in this version of Plasma. They will appear as a '?' in game.", self._report.warn("Translation '{}': Contents contains characters that cannot be used in this version of Plasma. They will appear as a '?' in game.",
language, indent=2) language)
# Yes, there are illegal characters... As a stopgap, we will export the file with # Yes, there are illegal characters... As a stopgap, we will export the file with
# replacement characters ("?") just so it'll work dammit. # replacement characters ("?") just so it'll work dammit.
@ -125,28 +125,30 @@ class LocalizationConverter:
locs = itertools.chain(self._strings["Journals"].items(), self._strings["DynaTexts"].items()) locs = itertools.chain(self._strings["Journals"].items(), self._strings["DynaTexts"].items())
for journal_name, translations in locs: for journal_name, translations in locs:
self._report.msg("Copying localization '{}'", journal_name, indent=1) self._report.msg(f"Copying localization '{journal_name}'")
for language_name, value in translations.items(): with self._report.indent():
if language_name not in _SP_LANGUAGES: for language_name, value in translations.items():
self._report.warn("Translation '{}' will not be used because it is not supported in this version of Plasma.", if language_name not in _SP_LANGUAGES:
language_name, indent=2) self._report.warn("Translation '{}' will not be used because it is not supported in this version of Plasma.",
continue language_name)
suffix = "_{}".format(language_name.lower()) if language_name != "English" else "" continue
file_name = "{}--{}{}.txt".format(age_name, journal_name, suffix) suffix = "_{}".format(language_name.lower()) if language_name != "English" else ""
write_text_file(language_name, file_name, value) file_name = "{}--{}{}.txt".format(age_name, journal_name, suffix)
write_text_file(language_name, file_name, value)
# Ensure that default (read: "English") journal is available # Ensure that default (read: "English") journal is available
if "English" not in translations: with self._report.indent():
language_name, value = next(((language_name, value) for language_name, value in translations.items() if "English" not in translations:
if language_name in _SP_LANGUAGES), (None, None)) language_name, value = next(((language_name, value) for language_name, value in translations.items()
if language_name is not None: if language_name in _SP_LANGUAGES), (None, None))
file_name = "{}--{}.txt".format(age_name, journal_name) if language_name is not None:
# If you manage to screw up this badly... Well, I am very sorry. file_name = "{}--{}.txt".format(age_name, journal_name)
if write_text_file(language_name, file_name, value): # If you manage to screw up this badly... Well, I am very sorry.
self._report.warn("No 'English' translation available, so '{}' will be used as the default", if write_text_file(language_name, file_name, value):
language_name, indent=2) self._report.warn("No 'English' translation available, so '{}' will be used as the default",
else: language_name)
self._report.port("No 'English' nor any other suitable default translation available", indent=2) else:
self._report.port("No 'English' nor any other suitable default translation available")
def _generate_loc_files(self): def _generate_loc_files(self):
if not self._strings: if not self._strings:
@ -243,10 +245,9 @@ class LocalizationConverter:
if modifier.enabled: if modifier.enabled:
translations = [j for j in modifier.translations if j.text_id is not None] translations = [j for j in modifier.translations if j.text_id is not None]
if not translations: if not translations:
self._report.error("'{}': No content translations available. The localization will not be exported.", self._report.error(f"'{i.name}': No content translations available. The localization will not be exported.")
i.name, indent=2)
for j in translations: for j in translations:
self.add_string(modifier.localization_set, modifier.key_name, j.language, j.text_id, indent=1) self.add_string(modifier.localization_set, modifier.key_name, j.language, j.text_id)
inc_progress() inc_progress()
def _run_generate(self): def _run_generate(self):

57
korman/exporter/logger.py

@ -13,25 +13,34 @@
# You should have received a copy of the GNU General Public License # You should have received a copy of the GNU General Public License
# along with Korman. If not, see <http://www.gnu.org/licenses/>. # along with Korman. If not, see <http://www.gnu.org/licenses/>.
from ..korlib import ConsoleCursor, ConsoleToggler from __future__ import annotations
from .explosions import NonfatalExportError
from contextlib import contextmanager
from pathlib import Path from pathlib import Path
import threading import threading
import time import time
from typing import *
if TYPE_CHECKING:
from io import TextIOWrapper
from ..korlib import ConsoleCursor, ConsoleToggler
from .explosions import NonfatalExportError
_HEADING_SIZE = 60 _HEADING_SIZE = 60
_MAX_ELIPSES = 3 _MAX_ELIPSES = 3
_MAX_TIME_UNTIL_ELIPSES = 2.0 _MAX_TIME_UNTIL_ELIPSES = 2.0
class _ExportLogger: class _ExportLogger:
def __init__(self, print_logs, age_path=None): def __init__(self, print_logs: bool, age_path: Optional[str] = None):
self._errors = [] self._errors: List[str] = []
self._porting = [] self._porting: List[str] = []
self._warnings = [] self._warnings: List[str] = []
self._age_path = Path(age_path) if age_path is not None else None self._age_path = Path(age_path) if age_path is not None else None
self._file = None self._file: Optional[TextIOWrapper] = None
self._print_logs = print_logs self._print_logs = print_logs
self._time_start_overall = 0 self._time_start_overall: float = 0.0
self._indent_level: int = 0
def __enter__(self): def __enter__(self):
if self._age_path is not None: if self._age_path is not None:
@ -48,10 +57,22 @@ class _ExportLogger:
self._file.close() self._file.close()
return False return False
@contextmanager
def indent(self):
try:
self._indent_level += 1
yield
finally:
self._indent_level -= 1
@property
def indent_level(self) -> int:
return self._indent_level
def error(self, *args, **kwargs): def error(self, *args, **kwargs):
assert args assert args
indent = kwargs.get("indent", 0) indent = kwargs.get("indent", self._indent_level)
msg = "{}ERROR: {}".format(" " * indent, args[0]) msg = f"{' ' * indent}ERROR: {args[0]}"
if len(args) > 1: if len(args) > 1:
msg = msg.format(*args[1:], **kwargs) msg = msg.format(*args[1:], **kwargs)
if self._file is not None: if self._file is not None:
@ -63,8 +84,8 @@ class _ExportLogger:
def msg(self, *args, **kwargs): def msg(self, *args, **kwargs):
assert args assert args
indent = kwargs.get("indent", 0) indent = kwargs.get("indent", self._indent_level)
msg = "{}{}".format(" " * indent, args[0]) msg = f"{' ' * indent}{args[0]}"
if len(args) > 1: if len(args) > 1:
msg = msg.format(*args[1:], **kwargs) msg = msg.format(*args[1:], **kwargs)
if self._file is not None: if self._file is not None:
@ -74,8 +95,8 @@ class _ExportLogger:
def port(self, *args, **kwargs): def port(self, *args, **kwargs):
assert args assert args
indent = kwargs.get("indent", 0) indent = kwargs.get("indent", self._indent_level)
msg = "{}PORTING: {}".format(" " * indent, args[0]) msg = f"{' ' * indent}PORTNING: {args[0]}"
if len(args) > 1: if len(args) > 1:
msg = msg.format(*args[1:], **kwargs) msg = msg.format(*args[1:], **kwargs)
if self._file is not None: if self._file is not None:
@ -98,14 +119,14 @@ class _ExportLogger:
def progress_end(self): def progress_end(self):
if self._age_path is not None: if self._age_path is not None:
export_time = time.perf_counter() - self._time_start_overall export_time = time.perf_counter() - self._time_start_overall
self.msg("\nExported '{}' in {:.2f}s", self._age_path.name, export_time) self.msg(f"\nExported '{self._age_path.name}' in {export_time:.2f}s")
def progress_increment(self): def progress_increment(self):
pass pass
def progress_start(self, action): def progress_start(self, action):
if self._age_path is not None: if self._age_path is not None:
self.msg("Exporting '{}'", self._age_path.name) self.msg(f"Exporting '{self._age_path.name}'")
self._time_start_overall = time.perf_counter() self._time_start_overall = time.perf_counter()
def raise_errors(self): def raise_errors(self):
@ -122,8 +143,8 @@ class _ExportLogger:
def warn(self, *args, **kwargs): def warn(self, *args, **kwargs):
assert args assert args
indent = kwargs.get("indent", 0) indent = kwargs.get("indent", self._indent_level)
msg = "{}WARNING: {}".format(" " * indent, args[0]) msg = f"{' ' * indent}WARNING: {args[0]}"
if len(args) > 1: if len(args) > 1:
msg = msg.format(*args[1:], **kwargs) msg = msg.format(*args[1:], **kwargs)
if self._file is not None: if self._file is not None:
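
Taken together, these logger changes make every msg/warn/error/port call default to the
logger's current indent_level, while an explicit indent= keyword still overrides it. A
minimal sketch of the resulting behavior, assuming a report logger instance like the one
used throughout the exporter:

    report.msg("\nChecking Logic Trees...")           # indent level 0
    with report.indent():
        report.msg("NodeTree '{}'", tree_name)        # padded one level
        with report.indent():
            report.warn("suspicious node found")      # padded two levels, prefixed "WARNING: "
    report.port("legacy-style call", indent=2)        # manual indentation is still honored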

497
korman/exporter/material.py

@ -193,9 +193,8 @@ class MaterialConverter:
# being a waveset, doublesided, etc. # being a waveset, doublesided, etc.
single_user = self._requires_single_user(bo, bm) single_user = self._requires_single_user(bo, bm)
if single_user: if single_user:
mat_name = "{}_AutoSingle".format(bm.name) if bo.name == bm.name else "{}_{}".format(bo.name, bm.name) mat_name = f"{bm.name}_AutoSingle" if bo.name == bm.name else f"{bo.name}_{bm.name}"
self._report.msg("Exporting Material '{}' as single user '{}'", bm.name, mat_name, indent=1) self._report.msg(f"Exporting Material '{bm.name}' as single user '{mat_name}'")
hgmat = None
else: else:
# Ensure that RT-lit objects don't infect the static-lit objects. # Ensure that RT-lit objects don't infect the static-lit objects.
lighting_mod = bo.plasma_modifiers.lighting lighting_mod = bo.plasma_modifiers.lighting
@ -207,7 +206,7 @@ class MaterialConverter:
mat_prefix = "" mat_prefix = ""
mat_prefix2 = "NonVtxP_" if self._exporter().mesh.is_nonpreshaded(bo, bm) else "" mat_prefix2 = "NonVtxP_" if self._exporter().mesh.is_nonpreshaded(bo, bm) else ""
mat_name = "".join((mat_prefix, mat_prefix2, bm.name)) mat_name = "".join((mat_prefix, mat_prefix2, bm.name))
self._report.msg("Exporting Material '{}'", mat_name, indent=1) self._report.msg(f"Exporting Material '{mat_name}'")
hsgmat = self._mgr.find_key(hsGMaterial, name=mat_name, bl=bo) hsgmat = self._mgr.find_key(hsGMaterial, name=mat_name, bl=bo)
if hsgmat is not None: if hsgmat is not None:
return hsgmat return hsgmat
@ -228,43 +227,44 @@ class MaterialConverter:
restart_pass_next = False restart_pass_next = False
# Loop over layers # Loop over layers
for idx, slot in slots: with self._report.indent():
# Prepend any BumpMapping magic layers for idx, slot in slots:
if slot.use_map_normal: # Prepend any BumpMapping magic layers
if bo in self._bump_mats:
raise ExportError("Material '{}' has more than one bumpmap layer".format(bm.name))
du, dw, dv = self.export_bumpmap_slot(bo, bm, hsgmat, slot, idx)
hsgmat.addLayer(du.key) # Du
hsgmat.addLayer(dw.key) # Dw
hsgmat.addLayer(dv.key) # Dv
if slot.use_stencil:
stencils.append((idx, slot))
else:
tex_name = "{}_{}".format(mat_name, slot.name)
tex_layer = self.export_texture_slot(bo, bm, hsgmat, slot, idx, name=tex_name)
if restart_pass_next:
tex_layer.state.miscFlags |= hsGMatState.kMiscRestartPassHere
restart_pass_next = False
hsgmat.addLayer(tex_layer.key)
if slot.use_map_normal: if slot.use_map_normal:
self._bump_mats[bo] = (tex_layer.UVWSrc, tex_layer.transform) if bo in self._bump_mats:
# After a bumpmap layer(s), the next layer *must* be in a raise ExportError("Material '{}' has more than one bumpmap layer".format(bm.name))
# new pass, otherwise it gets added in non-intuitive ways du, dw, dv = self.export_bumpmap_slot(bo, bm, hsgmat, slot, idx)
restart_pass_next = True hsgmat.addLayer(du.key) # Du
if stencils: hsgmat.addLayer(dw.key) # Dw
tex_state = tex_layer.state hsgmat.addLayer(dv.key) # Dv
if not tex_state.blendFlags & hsGMatState.kBlendMask:
tex_state.blendFlags |= hsGMatState.kBlendAlpha if slot.use_stencil:
tex_state.miscFlags |= hsGMatState.kMiscRestartPassHere | hsGMatState.kMiscBindNext stencils.append((idx, slot))
curr_stencils = len(stencils) else:
for i in range(curr_stencils): tex_name = "{}_{}".format(mat_name, slot.name)
stencil_idx, stencil = stencils[i] tex_layer = self.export_texture_slot(bo, bm, hsgmat, slot, idx, name=tex_name)
stencil_name = "STENCILGEN_{}@{}_{}".format(stencil.name, bm.name, slot.name) if restart_pass_next:
stencil_layer = self.export_texture_slot(bo, bm, hsgmat, stencil, stencil_idx, name=stencil_name) tex_layer.state.miscFlags |= hsGMatState.kMiscRestartPassHere
if i+1 < curr_stencils: restart_pass_next = False
stencil_layer.state.miscFlags |= hsGMatState.kMiscBindNext hsgmat.addLayer(tex_layer.key)
hsgmat.addLayer(stencil_layer.key) if slot.use_map_normal:
self._bump_mats[bo] = (tex_layer.UVWSrc, tex_layer.transform)
# After a bumpmap layer(s), the next layer *must* be in a
# new pass, otherwise it gets added in non-intuitive ways
restart_pass_next = True
if stencils:
tex_state = tex_layer.state
if not tex_state.blendFlags & hsGMatState.kBlendMask:
tex_state.blendFlags |= hsGMatState.kBlendAlpha
tex_state.miscFlags |= hsGMatState.kMiscRestartPassHere | hsGMatState.kMiscBindNext
curr_stencils = len(stencils)
for i in range(curr_stencils):
stencil_idx, stencil = stencils[i]
stencil_name = "STENCILGEN_{}@{}_{}".format(stencil.name, bm.name, slot.name)
stencil_layer = self.export_texture_slot(bo, bm, hsgmat, stencil, stencil_idx, name=stencil_name)
if i+1 < curr_stencils:
stencil_layer.state.miscFlags |= hsGMatState.kMiscBindNext
hsgmat.addLayer(stencil_layer.key)
# Plasma makes several assumptions that every hsGMaterial has at least one layer. If this # Plasma makes several assumptions that every hsGMaterial has at least one layer. If this
# material had no Textures, we will need to initialize a default layer # material had no Textures, we will need to initialize a default layer
@ -293,7 +293,7 @@ class MaterialConverter:
layer.preshade = hsColorRGBA(0.0, 0.0, 0.0, 1.0) layer.preshade = hsColorRGBA(0.0, 0.0, 0.0, 1.0)
layer.runtime = hsColorRGBA(1.0, 1.0, 1.0, 1.0) layer.runtime = hsColorRGBA(1.0, 1.0, 1.0, 1.0)
self.export_prepared_image(name=image_name, image=image, alpha_type=image_alpha, self.export_prepared_image(name=image_name, image=image, alpha_type=image_alpha,
owner=layer, allowed_formats={"DDS"}, indent=4) owner=layer, allowed_formats={"DDS"})
material = self._mgr.add_object(hsGMaterial, bl=bo, name=name) material = self._mgr.add_object(hsGMaterial, bl=bo, name=name)
material.addLayer(layer.key) material.addLayer(layer.key)
return material, layer return material, layer
@ -309,7 +309,7 @@ class MaterialConverter:
# exporting a DXT1 version. As of right now, opaque vs on_off does nothing, so we still # exporting a DXT1 version. As of right now, opaque vs on_off does nothing, so we still
# get some turd-alpha data. # get some turd-alpha data.
if image_alpha == TextureAlpha.full and not want_preshade: if image_alpha == TextureAlpha.full and not want_preshade:
self._report.warn("Using an alpha texture with a non-alpha blend mode -- this may look bad", indent=3) self._report.warn("Using an alpha texture with a non-alpha blend mode -- this may look bad")
image_alpha = TextureAlpha.opaque image_alpha = TextureAlpha.opaque
image_name = "DECALPRINT_{}".format(image.name) image_name = "DECALPRINT_{}".format(image.name)
else: else:
@ -326,15 +326,17 @@ class MaterialConverter:
if rt_key or pre_key: if rt_key or pre_key:
return pre_key, rt_key return pre_key, rt_key
self._report.msg("Exporting Print Material '{}'", rtname, indent=3) self._report.msg(f"Exporting Print Material '{rtname}'")
rt_material, rt_layer = make_print_material(rtname) with self._report.indent():
rt_material, rt_layer = make_print_material(rtname)
if blend == hsGMatState.kBlendMult: if blend == hsGMatState.kBlendMult:
rt_layer.state.blendFlags |= hsGMatState.kBlendInvertFinalColor rt_layer.state.blendFlags |= hsGMatState.kBlendInvertFinalColor
rt_key = rt_material.key rt_key = rt_material.key
if want_preshade: if want_preshade:
self._report.msg("Exporting Print Material '{}'", prename, indent=3) self._report.msg(f"Exporting Print Material '{prename}'")
pre_material, pre_layer = make_print_material(prename) with self._report.indent():
pre_material, pre_layer = make_print_material(prename)
pre_material.compFlags |= hsGMaterial.kCompNeedsBlendChannel pre_material.compFlags |= hsGMaterial.kCompNeedsBlendChannel
pre_layer.state.miscFlags |= hsGMatState.kMiscBindNext | hsGMatState.kMiscRestartPassHere pre_layer.state.miscFlags |= hsGMatState.kMiscBindNext | hsGMatState.kMiscRestartPassHere
pre_layer.preshade = hsColorRGBA(1.0, 1.0, 1.0, 1.0) pre_layer.preshade = hsColorRGBA(1.0, 1.0, 1.0, 1.0)
@ -346,7 +348,8 @@ class MaterialConverter:
blend_layer.state.ZFlags = hsGMatState.kZNoZWrite blend_layer.state.ZFlags = hsGMatState.kZNoZWrite
blend_layer.ambient = hsColorRGBA(1.0, 1.0, 1.0, 1.0) blend_layer.ambient = hsColorRGBA(1.0, 1.0, 1.0, 1.0)
pre_material.addLayer(blend_layer.key) pre_material.addLayer(blend_layer.key)
self.export_alpha_blend("LINEAR", "HORIZONTAL", owner=blend_layer, indent=4) with self._report.indent():
self.export_alpha_blend("LINEAR", "HORIZONTAL", owner=blend_layer)
pre_key = pre_material.key pre_key = pre_material.key
else: else:
@ -354,10 +357,10 @@ class MaterialConverter:
return pre_key, rt_key return pre_key, rt_key
def export_waveset_material(self, bo, bm): def export_waveset_material(self, bo, bm):
self._report.msg("Exporting WaveSet Material '{}'", bm.name, indent=1) self._report.msg(f"Exporting WaveSet Material '{bm.name}'")
# WaveSets MUST have their own material # WaveSets MUST have their own material
unique_name = "{}_WaveSet7".format(bm.name) unique_name = f"{bm.name}_WaveSet7"
hsgmat = self._mgr.add_object(hsGMaterial, name=unique_name, bl=bo) hsgmat = self._mgr.add_object(hsGMaterial, name=unique_name, bl=bo)
# Materials MUST have one layer. Wavesets need alpha blending... # Materials MUST have one layer. Wavesets need alpha blending...
@ -370,13 +373,13 @@ class MaterialConverter:
return hsgmat.key return hsgmat.key
def export_bumpmap_slot(self, bo, bm, hsgmat, slot, idx): def export_bumpmap_slot(self, bo, bm, hsgmat, slot, idx):
name = "{}_{}".format(hsgmat.key.name, slot.name) name = f"{hsgmat.key.name}_{slot.name}"
self._report.msg("Exporting Plasma Bumpmap Layers for '{}'", name, indent=2) self._report.msg(f"Exporting Plasma Bumpmap Layers for '{name}'")
# Okay, now we need to make 3 layers for the Du, Dw, and Dv # Okay, now we need to make 3 layers for the Du, Dw, and Dv
du_layer = self._mgr.find_create_object(plLayer, name="{}_DU_BumpLut".format(name), bl=bo) du_layer = self._mgr.find_create_object(plLayer, name=f"{name}_DU_BumpLut", bl=bo)
dw_layer = self._mgr.find_create_object(plLayer, name="{}_DW_BumpLut".format(name), bl=bo) dw_layer = self._mgr.find_create_object(plLayer, name=f"{name}_DW_BumpLut", bl=bo)
dv_layer = self._mgr.find_create_object(plLayer, name="{}_DV_BumpLut".format(name), bl=bo) dv_layer = self._mgr.find_create_object(plLayer, name=f"{name}_DV_BumpLut", bl=bo)
for layer in (du_layer, dw_layer, dv_layer): for layer in (du_layer, dw_layer, dv_layer):
layer.ambient = hsColorRGBA(1.0, 1.0, 1.0, 1.0) layer.ambient = hsColorRGBA(1.0, 1.0, 1.0, 1.0)
@ -419,118 +422,119 @@ class MaterialConverter:
def export_texture_slot(self, bo, bm, hsgmat, slot, idx, name=None, blend_flags=True): def export_texture_slot(self, bo, bm, hsgmat, slot, idx, name=None, blend_flags=True):
if name is None: if name is None:
name = "{}_{}".format(bm.name if bm is not None else bo.name, slot.name) name = f"{bm.name if bm is not None else bo.name}_{slot.name}"
self._report.msg("Exporting Plasma Layer '{}'", name, indent=2) self._report.msg(f"Exporting Plasma Layer '{name}'")
layer = self._mgr.find_create_object(plLayer, name=name, bl=bo) layer = self._mgr.find_create_object(plLayer, name=name, bl=bo)
if bm is not None and not slot.use_map_normal: if bm is not None and not slot.use_map_normal:
self._propagate_material_settings(bo, bm, slot, layer) self._propagate_material_settings(bo, bm, slot, layer)
# UVW Channel with self._report.indent():
if slot.texture_coords == "UV": # UVW Channel
for i, uvchan in enumerate(bo.data.uv_layers): if slot.texture_coords == "UV":
if uvchan.name == slot.uv_layer: for i, uvchan in enumerate(bo.data.uv_layers):
layer.UVWSrc = i if uvchan.name == slot.uv_layer:
self._report.msg("Using UV Map #{} '{}'", i, name, indent=3) layer.UVWSrc = i
break self._report.msg(f"Using UV Map #{i} '{name}'")
else: break
self._report.msg("No UVMap specified... Blindly using the first one, maybe it exists :|", indent=3) else:
self._report.msg("No UVMap specified... Blindly using the first one, maybe it exists :|")
# Transform # Transform
xform = hsMatrix44() xform = hsMatrix44()
translation = hsVector3(slot.offset.x - (slot.scale.x - 1.0) / 2.0, translation = hsVector3(slot.offset.x - (slot.scale.x - 1.0) / 2.0,
-slot.offset.y - (slot.scale.y - 1.0) / 2.0, -slot.offset.y - (slot.scale.y - 1.0) / 2.0,
slot.offset.z - (slot.scale.z - 1.0) / 2.0) slot.offset.z - (slot.scale.z - 1.0) / 2.0)
xform.setTranslate(translation) xform.setTranslate(translation)
xform.setScale(hsVector3(*slot.scale)) xform.setScale(hsVector3(*slot.scale))
layer.transform = xform layer.transform = xform
wantStencil, canStencil = slot.use_stencil, slot.use_stencil and bm is not None and not slot.use_map_normal wantStencil, canStencil = slot.use_stencil, slot.use_stencil and bm is not None and not slot.use_map_normal
if wantStencil and not canStencil: if wantStencil and not canStencil:
self._exporter().report.warn("{} wants to stencil, but this is not a real Material".format(slot.name)) self._exporter().report.warn(f"{slot.name} wants to stencil, but this is not a real Material")
state = layer.state state = layer.state
if canStencil: if canStencil:
hsgmat.compFlags |= hsGMaterial.kCompNeedsBlendChannel hsgmat.compFlags |= hsGMaterial.kCompNeedsBlendChannel
state.blendFlags |= hsGMatState.kBlendAlpha | hsGMatState.kBlendAlphaMult | hsGMatState.kBlendNoTexColor state.blendFlags |= hsGMatState.kBlendAlpha | hsGMatState.kBlendAlphaMult | hsGMatState.kBlendNoTexColor
state.ZFlags |= hsGMatState.kZNoZWrite
layer.ambient = hsColorRGBA(1.0, 1.0, 1.0, 1.0)
elif blend_flags:
# Standard layer flags ahoy
if slot.blend_type == "ADD":
state.blendFlags |= hsGMatState.kBlendAddColorTimesAlpha
elif slot.blend_type == "MULTIPLY":
state.blendFlags |= hsGMatState.kBlendMult
# Check if this layer uses diffuse/runtime lighting
if bm is not None and not slot.use_map_color_diffuse:
layer.preshade = hsColorRGBA(0.0, 0.0, 0.0, 1.0)
layer.runtime = hsColorRGBA(0.0, 0.0, 0.0, 1.0)
# Check if this layer uses specular lighting
if bm is not None and slot.use_map_color_spec:
state.shadeFlags |= hsGMatState.kShadeSpecular
else:
layer.specular = hsColorRGBA(0.0, 0.0, 0.0, 1.0)
layer.specularPower = 1.0
texture = slot.texture
if texture.type == "BLEND":
hsgmat.compFlags |= hsGMaterial.kCompNeedsBlendChannel
# Handle material and per-texture emissive
if self._is_emissive(bm):
# If the previous slot's use_map_emit is different, then we need to flag this as a new
# pass so that the new emit color will be used. But only if it's not a doggone stencil.
if not wantStencil and bm is not None and slot is not None:
filtered_slots = tuple(filter(lambda x: x and x.use, bm.texture_slots[:idx]))
if filtered_slots:
prev_slot = filtered_slots[-1]
if prev_slot != slot and prev_slot.use_map_emit != slot.use_map_emit:
state.miscFlags |= hsGMatState.kMiscRestartPassHere
if self._is_emissive(bm, slot):
# Lightmapped emissive layers seem to cause cascading render issues. Skip flagging it
# and just hope that the ambient color bump is good enough.
if bo.plasma_modifiers.lightmap.bake_lightmap:
self._report.warn("A lightmapped and emissive material??? You like living dangerously...", indent=3)
else:
state.shadeFlags |= hsGMatState.kShadeEmissive
# Apply custom layer properties
wantBumpmap = bm is not None and slot.use_map_normal
if wantBumpmap:
state.blendFlags = hsGMatState.kBlendDot3
state.miscFlags = hsGMatState.kMiscBumpLayer
strength = max(min(1.0, slot.normal_factor), 0.0)
layer.ambient = hsColorRGBA(0.0, 0.0, 0.0, 1.0)
layer.preshade = hsColorRGBA(0.0, 0.0, 0.0, 1.0)
layer.runtime = hsColorRGBA(strength, 0.0, 0.0, 1.0)
layer.specular = hsColorRGBA(0.0, 0.0, 0.0, 1.0)
else:
layer_props = texture.plasma_layer
layer.opacity = layer_props.opacity / 100
self._handle_layer_opacity(layer, layer_props.opacity)
if layer_props.alpha_halo:
state.blendFlags |= hsGMatState.kBlendAlphaTestHigh
if layer_props.z_bias:
state.ZFlags |= hsGMatState.kZIncLayer
if layer_props.skip_depth_test:
state.ZFlags |= hsGMatState.kZNoZRead
if layer_props.skip_depth_write:
state.ZFlags |= hsGMatState.kZNoZWrite state.ZFlags |= hsGMatState.kZNoZWrite
layer.ambient = hsColorRGBA(1.0, 1.0, 1.0, 1.0)
elif blend_flags:
# Standard layer flags ahoy
if slot.blend_type == "ADD":
state.blendFlags |= hsGMatState.kBlendAddColorTimesAlpha
elif slot.blend_type == "MULTIPLY":
state.blendFlags |= hsGMatState.kBlendMult
# Export the specific texture type # Check if this layer uses diffuse/runtime lighting
self._tex_exporters[texture.type](bo, layer, slot, idx) if bm is not None and not slot.use_map_color_diffuse:
layer.preshade = hsColorRGBA(0.0, 0.0, 0.0, 1.0)
# Export any layer animations layer.runtime = hsColorRGBA(0.0, 0.0, 0.0, 1.0)
# NOTE: animated stencils and bumpmaps are nonsense.
if not slot.use_stencil and not wantBumpmap:
layer = self._export_layer_animations(bo, bm, slot, idx, layer)
# Stash the top of the stack for later in the export # Check if this layer uses specular lighting
if bm is not None: if bm is not None and slot.use_map_color_spec:
self._obj2layer[bo][bm][texture].append(layer.key) state.shadeFlags |= hsGMatState.kShadeSpecular
return layer else:
layer.specular = hsColorRGBA(0.0, 0.0, 0.0, 1.0)
layer.specularPower = 1.0
texture = slot.texture
if texture.type == "BLEND":
hsgmat.compFlags |= hsGMaterial.kCompNeedsBlendChannel
# Handle material and per-texture emissive
if self._is_emissive(bm):
# If the previous slot's use_map_emit is different, then we need to flag this as a new
# pass so that the new emit color will be used. But only if it's not a doggone stencil.
if not wantStencil and bm is not None and slot is not None:
filtered_slots = tuple(filter(lambda x: x and x.use, bm.texture_slots[:idx]))
if filtered_slots:
prev_slot = filtered_slots[-1]
if prev_slot != slot and prev_slot.use_map_emit != slot.use_map_emit:
state.miscFlags |= hsGMatState.kMiscRestartPassHere
if self._is_emissive(bm, slot):
# Lightmapped emissive layers seem to cause cascading render issues. Skip flagging it
# and just hope that the ambient color bump is good enough.
if bo.plasma_modifiers.lightmap.bake_lightmap:
self._report.warn("A lightmapped and emissive material??? You like living dangerously...")
else:
state.shadeFlags |= hsGMatState.kShadeEmissive
# Apply custom layer properties
wantBumpmap = bm is not None and slot.use_map_normal
if wantBumpmap:
state.blendFlags = hsGMatState.kBlendDot3
state.miscFlags = hsGMatState.kMiscBumpLayer
strength = max(min(1.0, slot.normal_factor), 0.0)
layer.ambient = hsColorRGBA(0.0, 0.0, 0.0, 1.0)
layer.preshade = hsColorRGBA(0.0, 0.0, 0.0, 1.0)
layer.runtime = hsColorRGBA(strength, 0.0, 0.0, 1.0)
layer.specular = hsColorRGBA(0.0, 0.0, 0.0, 1.0)
else:
layer_props = texture.plasma_layer
layer.opacity = layer_props.opacity / 100
self._handle_layer_opacity(layer, layer_props.opacity)
if layer_props.alpha_halo:
state.blendFlags |= hsGMatState.kBlendAlphaTestHigh
if layer_props.z_bias:
state.ZFlags |= hsGMatState.kZIncLayer
if layer_props.skip_depth_test:
state.ZFlags |= hsGMatState.kZNoZRead
if layer_props.skip_depth_write:
state.ZFlags |= hsGMatState.kZNoZWrite
# Export the specific texture type
self._tex_exporters[texture.type](bo, layer, slot, idx)
# Export any layer animations
# NOTE: animated stencils and bumpmaps are nonsense.
if not slot.use_stencil and not wantBumpmap:
layer = self._export_layer_animations(bo, bm, slot, idx, layer)
# Stash the top of the stack for later in the export
if bm is not None:
self._obj2layer[bo][bm][texture].append(layer.key)
return layer
def _export_layer_animations(self, bo, bm, tex_slot, idx, base_layer) -> plLayer: def _export_layer_animations(self, bo, bm, tex_slot, idx, base_layer) -> plLayer:
top_layer = base_layer top_layer = base_layer
@@ -715,7 +719,7 @@ class MaterialConverter:
            # to a big "finalize" save step to prevent races. The texture cache would
            # prevent that as well, so we could theoretically slice-and-dice the single
            # image here... but... meh. Offloading taim.
-           self.export_prepared_image(texture=texture, owner=layer, indent=3,
+           self.export_prepared_image(texture=texture, owner=layer,
                                       alpha_type=TextureAlpha.opaque, mipmap=True,
                                       allowed_formats={"DDS"}, is_cube_map=True, tag="cubemap")
@@ -732,7 +736,7 @@ class MaterialConverter:
        oRes = bl_env.resolution
        eRes = helpers.ensure_power_of_two(oRes)
        if oRes != eRes:
-           self._report.msg("Overriding EnvMap size to ({}x{}) -- POT", eRes, eRes, indent=3)
+           self._report.msg(f"Overriding EnvMap size to ({eRes}x{eRes}) -- POT")

        # And now for the general ho'hum-ness
        pl_env = self._mgr.find_create_object(pl_class, bl=bo, name=name)
@@ -791,7 +795,7 @@ class MaterialConverter:
        if viewpt.type == "CAMERA":
            warn = self._report.port if bl_env.mapping == "PLANE" else self._report.warn
            warn("Environment Map '{}' is exporting as a cube map. The viewpoint '{}' is a camera, but only its position will be used.",
-                bl_env.id_data.name, viewpt.name, indent=5)
+                bl_env.id_data.name, viewpt.name)

        # DEMs can do just a position vector. We actually prefer this because the WaveSet exporter
        # will probably want to steal it for diabolical purposes... In MOUL, root objects are
@@ -830,8 +834,7 @@ class MaterialConverter:
            alpha_type = self._test_image_alpha(texture.image)
            has_alpha = texture.use_calculate_alpha or slot.use_stencil or alpha_type != TextureAlpha.opaque
            if (texture.image.use_alpha and texture.use_alpha) and not has_alpha:
-               warning = "'{}' wants to use alpha, but '{}' is opaque".format(texture.name, texture.image.name)
-               self._exporter().report.warn(warning, indent=3)
+               self._report.warn(f"'{texture.name}' wants to use alpha, but '{texture.image.name}' is opaque")
        else:
            alpha_type, has_alpha = TextureAlpha.opaque, False
@@ -894,8 +897,7 @@ class MaterialConverter:
                                  detail_fade_stop=layer_props.detail_fade_stop,
                                  detail_opacity_start=layer_props.detail_opacity_start,
                                  detail_opacity_stop=layer_props.detail_opacity_stop,
-                                 mipmap=mipmap, allowed_formats=allowed_formats,
-                                 indent=3)
+                                 mipmap=mipmap, allowed_formats=allowed_formats)

    def _export_texture_type_none(self, bo, layer, slot, idx):
        # We'll allow this, just for sanity's sake...
@@ -911,14 +913,12 @@ class MaterialConverter:
        texture = slot.texture
        self.export_alpha_blend(texture.progression, texture.use_flip_axis, layer)

-   def export_alpha_blend(self, progression, axis, owner, indent=2):
+   def export_alpha_blend(self, progression, axis, owner):
        """This exports an alpha blend texture as exposed by bpy.types.BlendTexture.
           The following arguments are expected:
           - progression: (required)
           - axis: (required)
           - owner: (required) the Plasma object using this image
-          - indent: (optional) indentation level for log messages
-                    default: 2
        """

        # Certain blend types don't use an axis...
@@ -1013,7 +1013,7 @@ class MaterialConverter:
        image.pack(True)

        self.export_prepared_image(image=image, owner=owner, allowed_formats={"BMP"},
-                                  alpha_type=TextureAlpha.full, indent=indent, ephemeral=True)
+                                  alpha_type=TextureAlpha.full, ephemeral=True)

    def export_prepared_image(self, **kwargs):
        """This exports an externally prepared image and an optional owning layer.
@@ -1026,8 +1026,6 @@ class MaterialConverter:
                        valid options: BMP, DDS, JPG, PNG
           - extension: (optional) file extension to use for the image object
                        to use the image datablock extension, set this to None
-          - indent: (optional) indentation level for log messages
-                    default: 2
           - ephemeral: (optional) never cache this image
           - tag: (optional) an optional identifier hint that allows multiple images with the
                  same name to coexist in the cache
@@ -1035,15 +1033,14 @@ class MaterialConverter:
                  that must be split into six separate images for Plasma
        """
        owner = kwargs.pop("owner", None)
-       indent = kwargs.pop("indent", 2)
        key = _Texture(**kwargs)
        image = key.image

        if key not in self._pending:
-           self._report.msg("Stashing '{}' for conversion as '{}'", image.name, key, indent=indent)
+           self._report.msg("Stashing '{}' for conversion as '{}'", image.name, key)
            self._pending[key] = [owner.key,]
        else:
-           self._report.msg("Found another user of '{}'", key, indent=indent)
+           self._report.msg("Found another user of '{}'", key)
            self._pending[key].append(owner.key)

    def finalize(self):
@@ -1064,43 +1061,44 @@ class MaterialConverter:
            pClassName = "CubicEnvironmap" if key.is_cube_map else "Mipmap"
            self._report.msg("\n[{} '{}']", pClassName, name)
+           with self._report.indent():
                image = key.image

                # Now we try to use the pile of hints we were given to figure out what format to use
                allowed_formats = key.allowed_formats
                if key.mipmap:
                    compression = plBitmap.kDirectXCompression
                elif "PNG" in allowed_formats and self._mgr.getVer() == pvMoul:
                    compression = plBitmap.kPNGCompression
                elif "DDS" in allowed_formats:
                    compression = plBitmap.kDirectXCompression
                elif "JPG" in allowed_formats:
                    compression = plBitmap.kJPEGCompression
                elif "BMP" in allowed_formats:
                    compression = plBitmap.kUncompressed
                else:
                    raise RuntimeError(allowed_formats)
                dxt = plBitmap.kDXT5 if key.alpha_type == TextureAlpha.full else plBitmap.kDXT1

                # Mayhaps we have a cached version of this that has already been exported
                cached_image = texcache.get_from_texture(key, compression)

                if cached_image is None:
                    numLevels, width, height, data = self._finalize_cache(texcache, key, image, name, compression, dxt)
                    self._finalize_bitmap(key, owners, name, numLevels, width, height, compression, dxt, data)
                else:
                    width, height = cached_image.export_size
                    data = cached_image.image_data
                    numLevels = cached_image.mip_levels

                    # If the cached image data is junk, PyHSPlasma will raise a RuntimeError,
                    # so we'll attempt a recache...
                    try:
                        self._finalize_bitmap(key, owners, name, numLevels, width, height, compression, dxt, data)
                    except RuntimeError:
-                       self._report.warn("Cached image is corrupted! Recaching image...", indent=1)
+                       self._report.warn("Cached image is corrupted! Recaching image...")
                        numLevels, width, height, data = self._finalize_cache(texcache, key, image, name, compression, dxt)
                        self._finalize_bitmap(key, owners, name, numLevels, width, height, compression, dxt, data)

            inc_progress()
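The texture finalization above is representative of the new logging pattern in this diff: a heading is logged first, and everything that belongs "under" it runs inside a `with self._report.indent():` block rather than passing an explicit `indent=` level to every call. As a rough illustration only, here is a minimal sketch of what an indentation-aware report object could look like, assuming a `contextlib`-based design; the class name and details are invented for this example and the actual Korman report/logger implementation may differ.

# Minimal sketch, not Korman's actual logger: an indentation level that is
# raised for the duration of a `with` block and restored afterwards.
from contextlib import contextmanager

class _IndentedReport:
    def __init__(self):
        self.indent_level = 0

    @contextmanager
    def indent(self, amount=1):
        # Raise the indentation for any messages logged inside the block.
        self.indent_level += amount
        try:
            yield self
        finally:
            self.indent_level -= amount

    def msg(self, text, *args, indent=None):
        # An explicit indent= still wins, for call sites that keep passing one.
        level = self.indent_level if indent is None else indent
        print("    " * level + text.format(*args))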
@@ -1111,45 +1109,46 @@ class MaterialConverter:
        # business to account for per-page textures
        pages = {}

-       self._report.msg("Adding to...", indent=1)
-       for owner_key in owners:
-           owner = owner_key.object
-           self._report.msg("[{} '{}']", owner.ClassName()[2:], owner_key.name, indent=2)
+       self._report.msg("Adding to...")
+       with self._report.indent():
+           for owner_key in owners:
+               owner = owner_key.object
+               self._report.msg(f"[{owner.ClassName()[2:]} '{owner_key.name}']")
                page = mgr.get_textures_page(owner_key)  # Layer's page or Textures.prp

                # If we haven't created this texture in the page (either layer's page or Textures.prp),
                # then we need to do that and stuff the level data. This is a little tedious, but we
                # need to be careful to manage our resources correctly
                if page not in pages:
                    mipmap = plMipmap(name=name, width=width, height=height, numLevels=numLevels,
                                      compType=compression, format=plBitmap.kRGB8888, dxtLevel=dxt)
                    if key.is_cube_map:
                        assert len(data) == 6
                        texture = plCubicEnvironmap(name)
                        for face_name, face_data in zip(BLENDER_CUBE_MAP, data):
                            for i in range(numLevels):
                                mipmap.setLevel(i, face_data[i])
                            setattr(texture, face_name, mipmap)
                    else:
                        assert len(data) == 1
                        for i in range(numLevels):
                            mipmap.setLevel(i, data[0][i])
                        texture = mipmap

                    mgr.AddObject(page, texture)
                    pages[page] = texture
                else:
                    texture = pages[page]

                # The object that references this image can be either a layer (will appear
                # in the 3d world) or an image library (will appear in a journal or in another
                # dynamic manner in game)
                if isinstance(owner, plLayerInterface):
                    owner.texture = texture.key
                elif isinstance(owner, plImageLibMod):
                    owner.addImage(texture.key)
                else:
                    raise NotImplementedError(owner.ClassName())

    def _finalize_cache(self, texcache, key, image, name, compression, dxt):
        if key.is_cube_map:
@@ -1162,7 +1161,7 @@ class MaterialConverter:
    def _finalize_cube_map(self, key, image, name, compression, dxt):
        oWidth, oHeight = image.size
        if oWidth == 0 and oHeight == 0:
-           raise ExportError("Image '{}' could not be loaded.".format(image.name))
+           raise ExportError(f"Image '{image.name}' could not be loaded.")

        # Non-DXT images are BGRA in Plasma
        bgra = compression != plBitmap.kDirectXCompression
@@ -1177,7 +1176,7 @@ class MaterialConverter:
        # something funky.
        if oWidth != cWidth or oHeight != cHeight:
            self._report.warn("Image was resized by Blender to ({}x{})--resizing the resize to ({}x{})",
-                             cWidth, cHeight, oWidth, oHeight, indent=1)
+                             cWidth, cHeight, oWidth, oHeight)
            data = scale_image(data, cWidth, cHeight, oWidth, oHeight)

        # Face dimensions
@@ -1213,14 +1212,14 @@ class MaterialConverter:
            name = face_name[:-4].upper()
            if compression == plBitmap.kDirectXCompression:
                numLevels = glimage.num_levels
-               self._report.msg("Generating mip levels for cube face '{}'", name, indent=1)
+               self._report.msg("Generating mip levels for cube face '{}'", name)

                # If we're compressing this mofo, we'll need a temporary mipmap to do that here...
                mipmap = plMipmap(name=name, width=eWidth, height=eHeight, numLevels=numLevels,
                                  compType=compression, format=plBitmap.kRGB8888, dxtLevel=dxt)
            else:
                numLevels = 1
-               self._report.msg("Compressing single level for cube face '{}'", name, indent=1)
+               self._report.msg("Compressing single level for cube face '{}'", name)

            face_images[i] = [None] * numLevels
            for j in range(numLevels):
@@ -1244,7 +1243,7 @@ class MaterialConverter:
        eWidth, eHeight = glimage.size_pot
        if compression == plBitmap.kDirectXCompression:
            numLevels = glimage.num_levels
-           self._report.msg("Generating mip levels", indent=1)
+           self._report.msg("Generating mip levels")

            # If this is a DXT-compressed mipmap, we need to use a temporary mipmap
            # to do the compression. We'll then steal the data from it.
@@ -1252,7 +1251,7 @@ class MaterialConverter:
                              compType=compression, format=plBitmap.kRGB8888, dxtLevel=dxt)
        else:
            numLevels = 1
-           self._report.msg("Compressing single level", indent=1)
+           self._report.msg("Compressing single level")

        # Hold the uncompressed level data for now. We may have to make multiple copies of
        # this mipmap for per-page textures :(
@@ -1281,7 +1280,7 @@ class MaterialConverter:
            yield from filter(None, self._obj2layer[bo][bm][tex])
            return
        if bo is None and bm is None and tex is None:
-           self._exporter().report.warn("Asking for all the layers we've ever exported, eh? You like living dangerously.", indent=2)
+           self._exporter().report.warn("Asking for all the layers we've ever exported, eh? You like living dangerously.")

        # What we want to do is filter _obj2layers:
        #   bo if set, or all objects

31
korman/exporter/mesh.py

@@ -357,22 +357,24 @@ class MeshConverter(_MeshManager):
        self._report.progress_range = len(self._dspans)
        inc_progress = self._report.progress_increment
        log_msg = self._report.msg
+       indent = self._report.indent

        log_msg("\nFinalizing Geometry")
+       with indent():
            for loc in self._dspans.values():
                for dspan in loc.values():
-                   log_msg("[DrawableSpans '{}']", dspan.key.name, indent=1)
+                   log_msg("[DrawableSpans '{}']", dspan.key.name)

                    # This mega-function does a lot:
                    # 1. Converts SourceSpans (geospans) to Icicles and bakes geometry into plGBuffers
                    # 2. Calculates the Icicle bounds
                    # 3. Builds the plSpaceTree
                    # 4. Clears the SourceSpans
                    dspan.composeGeometry(True, True)
                    inc_progress()

    def _export_geometry(self, bo, mesh, materials, geospans, mat2span_LUT):
-       self._report.msg("Converting geometry from '{}'...", mesh.name, indent=1)
+       self._report.msg(f"Converting geometry from '{mesh.name}'...")

        # Recall that materials is a mapping of exported materials to blender material indices.
        # Therefore, geodata maps blender material indices to working geometry data.
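The mesh finalization above also shows that the context manager can be bound to a local name (`indent = self._report.indent`) and still be used as `with indent():`, since each call to the bound method produces a fresh context. A hypothetical usage, reusing the `_IndentedReport` sketch from the material converter section; the span name here is invented for the example.

# Usage sketch only; _IndentedReport is the illustrative class above.
report = _IndentedReport()
indent = report.indent                  # bound-method alias, as in MeshConverter
report.msg("\nFinalizing Geometry")
with indent():
    report.msg("[DrawableSpans '{}']", "Example_District_0")   # logged one level deeper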
@@ -633,7 +635,7 @@ class MeshConverter(_MeshManager):
        for i in geospans:
            dspan = self._find_create_dspan(bo, i.geospan, i.pass_index)
            self._report.msg("Exported hsGMaterial '{}' geometry into '{}'",
-                            i.geospan.material.name, dspan.key.name, indent=1)
+                            i.geospan.material.name, dspan.key.name)
            idx = dspan.addSourceSpan(i.geospan)
            diidx = _diindices.setdefault(dspan, [])
            diidx.append(idx)
@@ -652,8 +654,7 @@ class MeshConverter(_MeshManager):
        waveset_mod = bo.plasma_modifiers.water_basic
        if waveset_mod.enabled:
            if len(materials) > 1:
-               msg = "'{}' is a WaveSet -- only one material is supported".format(bo.name)
-               self._exporter().report.warn(msg, indent=1)
+               self._report.warn(f"'{bo.name}' is a WaveSet -- only one material is supported")
            blmat = materials[0][1]
            self._check_vtx_nonpreshaded(bo, mesh, 0, blmat)
            matKey = self.material.export_waveset_material(bo, blmat)

8
korman/exporter/outfile.py

@@ -318,11 +318,11 @@ class OutputFiles:
                    py_code = "{}\n\n{}\n".format(i.file_data, plasma_python_glue)
                else:
                    py_code = i.file_data
-               result, pyc = korlib.compyle(i.filename, py_code, py_version, report, indent=1)
+               result, pyc = korlib.compyle(i.filename, py_code, py_version, report)
                if result:
                    pyc_objects.append((i.filename, pyc))
        except korlib.PythonNotAvailableError as error:
-           report.warn("Python {} is not available. Your Age scripts were not packaged.", error, indent=1)
+           report.warn(f"Python {error} is not available. Your Age scripts were not packaged.")
        else:
            if pyc_objects:
                with self.generate_dat_file("{}.pak".format(self._exporter().age_name),
@@ -392,7 +392,7 @@ class OutputFiles:
                shutil.copy2(i.file_path, dst_path)
            else:
                report.warn("No data found for dependency file '{}'. It will not be copied into the export directory.",
-                           PurePath(i.dirname, i.filename), indent=1)
+                           PurePath(i.dirname, i.filename))

    def _write_gather_build(self):
        report = self._exporter().report
@@ -458,7 +458,7 @@ class OutputFiles:
            elif i.file_path:
                zf.write(i.file_path, arcpath)
            else:
-               report.warn("No data found for dependency file '{}'. It will not be archived.", arcpath, indent=1)
+               report.warn(f"No data found for dependency file '{arcpath}'. It will not be archived.")

    @property
    def _version(self):

6
korman/exporter/physics.py

@@ -261,7 +261,7 @@ class PhysicsConverter:
        member_group = getattr(plSimDefs, kwargs.get("member_group", "kGroupLOSOnly"))
        if physical.memberGroup != member_group and member_group != plSimDefs.kGroupLOSOnly:
-           self._report.warn("{}: Physical memberGroup overwritten!", bo.name, indent=2)
+           self._report.warn(f"{bo.name}: Physical memberGroup overwritten!")
        physical.memberGroup = member_group

        # Sanity checking: only TPotS/Havok fully supports triangle mesh detector regions.
@@ -271,7 +271,7 @@ class PhysicsConverter:
        if physical.memberGroup == plSimDefs.kGroupDetector and physical.boundsType in (plSimDefs.kExplicitBounds, plSimDefs.kProxyBounds):
            msg = f"'{bo.name}': Triangle mesh regions are poorly supported. Use a convex hull or box instead."
            if ver <= pvPots:
-               self._report.port(msg, indent=2)
+               self._report.port(msg)
            else:
                raise ExportError(msg)
@@ -297,7 +297,7 @@ class PhysicsConverter:
        if volume < 0.001:
            self._report.warn(
                "{}: Physical wants to be a convex hull but appears to be flat (volume={}), forcing to triangle mesh...",
-               bo.name, volume, indent=2
+               bo.name, volume
            )
            self._export_trimesh(bo, physical, local_space, mat)

8
korman/exporter/python.py

@@ -52,7 +52,7 @@ class PythonPackageExporter:
                code = source
            code = "{}\n\n{}\n".format(code, plasma_python_glue)
-           success, result = korlib.compyle(filename, code, py_version, report, indent=1)
+           success, result = korlib.compyle(filename, code, py_version, report)
            if not success:
                raise ExportError("Failed to compyle '{}':\n{}".format(filename, result))
            py_code.append((filename, result))
@@ -68,7 +68,7 @@ class PythonPackageExporter:
                code = source
            # no glue needed here, ma!
-           success, result = korlib.compyle(filename, code, py_version, report, indent=1)
+           success, result = korlib.compyle(filename, code, py_version, report)
            if not success:
                raise ExportError("Failed to compyle '{}':\n{}".format(filename, result))
            py_code.append((filename, result))
@@ -88,10 +88,10 @@ class PythonPackageExporter:
            if age_py.plasma_text.package or age.python_method == "all":
                self._pfms[py_filename] = age_py
            else:
-               report.warn("AgeSDL Python Script provided, but not requested for packing... Using default Python.", indent=1)
+               report.warn("AgeSDL Python Script provided, but not requested for packing... Using default Python.")
                self._pfms[py_filename] = very_very_special_python.format(age_name=fixed_agename)
        else:
-           report.msg("Packing default AgeSDL Python", indent=1)
+           report.msg("Packing default AgeSDL Python")
            very_very_special_python.format(age_name=age_props.age_name)
            self._pfms[py_filename] = very_very_special_python.format(age_name=fixed_agename)

106
korman/exporter/rtlight.py

@@ -50,19 +50,19 @@ class LightConverter:
        # If you change these calculations, be sure to update the AnimationConverter!
        intens, attenEnd = self.convert_attenuation(bl)
        if bl.falloff_type == "CONSTANT":
-           self._report.msg("Attenuation: No Falloff", indent=2)
+           self._report.msg("Attenuation: No Falloff")
            pl.attenConst = intens
            pl.attenLinear = 0.0
            pl.attenQuadratic = 0.0
            pl.attenCutoff = attenEnd
        elif bl.falloff_type == "INVERSE_LINEAR":
-           self._report.msg("Attenuation: Inverse Linear", indent=2)
+           self._report.msg("Attenuation: Inverse Linear")
            pl.attenConst = 1.0
            pl.attenLinear = self.convert_attenuation_linear(intens, attenEnd)
            pl.attenQuadratic = 0.0
            pl.attenCutoff = attenEnd
        elif bl.falloff_type == "INVERSE_SQUARE":
-           self._report.msg("Attenuation: Inverse Square", indent=2)
+           self._report.msg("Attenuation: Inverse Square")
            pl.attenConst = 1.0
            pl.attenLinear = 0.0
            pl.attenQuadratic = self.convert_attenuation_quadratic(intens, attenEnd)
@@ -82,19 +82,21 @@ class LightConverter:
        return max(0.0, (intensity * _FAR_POWER - 1.0) / pow(end, 2))

    def _convert_area_lamp(self, bl, pl):
-       self._report.msg("[LimitedDirLightInfo '{}']", bl.name, indent=1)
+       self._report.msg("[LimitedDirLightInfo '{}']", bl.name)

        pl.width = bl.size
        pl.depth = bl.size if bl.shape == "SQUARE" else bl.size_y
        pl.height = bl.plasma_lamp.size_height

    def _convert_point_lamp(self, bl, pl):
-       self._report.msg("[OmniLightInfo '{}']", bl.name, indent=1)
+       self._report.msg("[OmniLightInfo '{}']", bl.name)
+       with self._report.indent():
            self._convert_attenuation(bl, pl)

    def _convert_spot_lamp(self, bl, pl):
-       self._report.msg("[SpotLightInfo '{}']", bl.name, indent=1)
+       self._report.msg("[SpotLightInfo '{}']", bl.name)
+       with self._report.indent():
            self._convert_attenuation(bl, pl)

        # Spot lights have a few more things...
        spot_size = bl.spot_size
@@ -109,7 +111,7 @@ class LightConverter:
            pl.falloff = 1.0

    def _convert_sun_lamp(self, bl, pl):
-       self._report.msg("[DirectionalLightInfo '{}']", bl.name, indent=1)
+       self._report.msg("[DirectionalLightInfo '{}']", bl.name)

    def export_rtlight(self, so, bo):
        bl_light = bo.data
@@ -139,18 +141,18 @@ class LightConverter:
        # Apply the colors
        if bl_light.use_diffuse:
-           self._report.msg("Diffuse: {}", diff_str, indent=2)
+           self._report.msg(f"Diffuse: {diff_str}")
            pl_light.diffuse = hsColorRGBA(*diff_color)
        else:
-           self._report.msg("Diffuse: OFF", indent=2)
+           self._report.msg("Diffuse: OFF")
            pl_light.diffuse = hsColorRGBA(0.0, 0.0, 0.0, energy)

        if bl_light.use_specular:
-           self._report.msg("Specular: {}", spec_str, indent=2)
+           self._report.msg(f"Specular: {spec_str}")
            pl_light.setProperty(plLightInfo.kLPHasSpecular, True)
            pl_light.specular = hsColorRGBA(*spec_color)
        else:
-           self._report.msg("Specular: OFF", indent=2)
+           self._report.msg(f"Specular: OFF")
            pl_light.specular = hsColorRGBA(0.0, 0.0, 0.0, energy)

        rtlamp = bl_light.plasma_lamp
@@ -207,7 +209,7 @@ class LightConverter:
        # projection Lamp with our own faux Material. Unfortunately, Plasma only supports projecting
        # one layer. We could exploit the fUnderLay and fOverLay system to export everything, but meh.
        if len(tex_slots) > 1:
-           self._report.warn("Only one texture slot can be exported per Lamp. Picking the first one: '{}'".format(slot.name), indent=3)
+           self._report.warn(f"Only one texture slot can be exported per Lamp. Picking the first one: '{slot.name}'")
        layer = mat.export_texture_slot(bo, None, None, slot, 0, blend_flags=False)
        state = layer.state
@@ -250,50 +252,50 @@ class LightConverter:
    def find_material_light_keys(self, bo, bm):
        """Given a blender material, we find the keys of all matching Plasma RT Lights.
           NOTE: We return a tuple of lists: ([permaLights], [permaProjs])"""
-       self._report.msg("Searching for runtime lights...", indent=1)
+       self._report.msg("Searching for runtime lights...")
        permaLights = []
        permaProjs = []

+       with self._report.indent():
            # We're going to inspect the material's light group.
            # If there is no light group, we'll say that there is no runtime lighting...
            # If there is, we will harvest all Blender lamps in that light group that are Plasma Objects
            lg = bm.light_group
            if lg is not None:
                for obj in lg.objects:
                    if obj.type != "LAMP":
                        # moronic...
                        continue
                    elif not obj.plasma_object.enabled:
                        # who cares?
                        continue
                    lamp = obj.data

                    # Check to see if they only want this light to work on its layer...
                    if lamp.use_own_layer:
                        # Pairs up elements from both layers sequences such that we can compare
                        # to see if the lamp and object are in the same layer.
                        # If you can think of a better way, be my guest.
                        test = zip(bo.layers, obj.layers)
                        for i in test:
                            if i == (True, True):
                                break
                        else:
                            # didn't find a layer where both lamp and object were, skip it.
-                           self._report.msg("[{}] '{}': not in same layer, skipping...",
-                                            lamp.type, obj.name, indent=2)
+                           self._report.msg(f"[{lamp.type}] '{obj.name}': not in same layer, skipping...")
                            continue

                    # This is probably where PermaLight vs PermaProj should be sorted out...
                    pl_light = self.get_light_key(obj, lamp, None)
                    if self._is_projection_lamp(lamp):
-                       self._report.msg("[{}] PermaProj '{}'", lamp.type, obj.name, indent=2)
+                       self._report.msg(f"[{lamp.type}] PermaProj '{obj.name}'")
                        permaProjs.append(pl_light)
                    else:
-                       self._report.msg("[{}] PermaLight '{}'", lamp.type, obj.name, indent=2)
+                       self._report.msg(f"[{lamp.type}] PermaLight '{obj.name}'")
                        permaLights.append(pl_light)

        if len(permaLights) > 8:
-           self._report.warn("More than 8 RT lamps on material: '{}'", bm.name, indent=1)
+           self._report.warn(f"More than 8 RT lamps on material: '{bm.name}'")

        return (permaLights, permaProjs)
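Because the indentation level now lives on the report object itself, nested `with` blocks accumulate: the "Searching for runtime lights..." heading above is logged at the caller's current level, and every per-lamp message inside the block lands one level deeper no matter how deeply the loop is nested. A hypothetical illustration using the `_IndentedReport` sketch from earlier (the lamp name is invented):

# Usage sketch only; _IndentedReport is the illustrative class above.
report = _IndentedReport()
report.msg("Searching for runtime lights...")          # caller's level
with report.indent():
    report.msg("[POINT] PermaLight '{}'", "Lamp.001")  # one level deeper
    with report.indent():
        report.msg("(even deeper detail would land here)")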

10
korman/korlib/python.py

@@ -24,7 +24,7 @@ class PythonNotAvailableError(Exception):
    pass

-def compyle(file_name, py_code, py_version, report=None, indent=0):
+def compyle(file_name, py_code, py_version, report=None):
    # NOTE: Should never run under Python 2.x
    my_version = sys.version_info[:2]
    assert my_version == (2, 7) or my_version[0] > 2

@@ -34,10 +34,10 @@ def compyle(file_name, py_code, py_version, report=None, indent=0):
    if idx == -1:
        module_name = file_name
    else:
        module_name = file_name[:idx]

    if report is not None:
-       report.msg("Compyling {}", file_name, indent=indent)
+       report.msg("Compyling {}", file_name)

    if my_version != py_version:
        import subprocess

@@ -48,7 +48,7 @@ def compyle(file_name, py_code, py_version, report=None, indent=0):
            py_code = py_code.encode("utf-8")
        except UnicodeError:
            if report is not None:
-               report.error("Could not encode '{}'", file_name, indent=indent+1)
+               report.error("Could not encode '{}'", file_name, indent=report.indent_level+1)
            return (False, "Could not encode file")
        result = subprocess.run(args, input=py_code, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        if result.returncode != 0:

@@ -57,7 +57,7 @@ def compyle(file_name, py_code, py_version, report=None, indent=0):
            except UnicodeError:
                error = result.stdout
            if report is not None:
-               report.error("Compylation Error in '{}'\n{}", file_name, error, indent=indent+1)
+               report.error("Compylation Error in '{}'\n{}", file_name, error, indent=report.indent_level+1)
        return (result.returncode == 0, result.stdout)
    else:
        raise NotImplementedError()
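Note that `compyle()` itself no longer accepts an `indent` argument: the caller establishes the logging depth before calling it, and the error paths above derive their extra level from `report.indent_level + 1`. A hedged sketch of a caller in that style, mirroring the invocations shown in outfile.py and python.py; the file name, code, and target version here are placeholders, and `korlib` and `report` are assumed to come from a Korman export session.

# Hypothetical caller; not taken verbatim from the commit.
with report.indent():
    success, result = korlib.compyle("example.py", py_code, (2, 7), report)
    if not success:
        # python.py raises ExportError here; RuntimeError keeps this sketch generic.
        raise RuntimeError("Failed to compyle 'example.py':\n{}".format(result))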

4
korman/korlib/texture.py

@@ -160,7 +160,7 @@ class GLTexture:
                self._texkey.detail_opacity_start / 100.0,
                self._texkey.detail_opacity_stop / 100.0)

-   def get_level_data(self, level=0, calc_alpha=False, report=None, indent=2, fast=False):
+   def get_level_data(self, level=0, calc_alpha=False, report=None, fast=False):
        """Gets the uncompressed pixel data for a requested mip level, optionally calculating the alpha
           channel from the image color data
        """

@@ -175,7 +175,7 @@ class GLTexture:
        eHeight = ensure_power_of_two(oHeight) >> level
        if report is not None:
-           report.msg("Level #{}: {}x{}", level, eWidth, eHeight, indent=indent)
+           report.msg("Level #{}: {}x{}", level, eWidth, eHeight)

        # Scale, if needed...
        if oWidth != eWidth or oHeight != eHeight:

4
korman/nodes/node_avatar.py

@@ -70,7 +70,7 @@ class PlasmaSittingBehaviorNode(PlasmaNodeBase, bpy.types.Node):
            if i is not None:
                sitmod.addNotifyKey(i.get_key(exporter, so))
            else:
-               exporter.report.warn("'{}' Node '{}' doesn't expose a key. It won't be triggered by '{}'!".format(i.bl_idname, i.name, self.name), indent=3)
+               exporter.report.warn(f"'{i.bl_idname}' Node '{i.name}' doesn't expose a key. It won't be triggered by '{self.name}'!")

    @property
    def requires_actor(self):

@@ -401,7 +401,7 @@ class PlasmaMultiStageBehaviorNode(PlasmaNodeBase, bpy.types.Node):
                    msbmod.addReceiver(key)
                else:
                    exporter.report.warn("'{}' Node '{}' doesn't expose a key. It won't be triggered by '{}'!",
-                                        node.bl_idname, node.name, self.name, indent=3)
+                                        node.bl_idname, node.name, self.name)

    @property
    def requires_actor(self):

11
korman/nodes/node_core.py

@@ -28,7 +28,7 @@ class PlasmaNodeBase:
        for i in self.find_outputs(socket_id, idname):
            key = i.get_key(exporter, so)
            if key is None:
-               exporter.report.warn(" '{}' Node '{}' doesn't expose a key. It won't be triggered by '{}'!".format(i.bl_idname, i.name, self.name), indent=3)
+               exporter.report.warn(f"'{i.bl_idname}' Node '{i.name}' doesn't expose a key. It won't be triggered by '{self.name}'!")
            elif isinstance(key, tuple):
                for i in key:
                    notify.addReceiver(key)

@@ -498,10 +498,11 @@ class PlasmaNodeTree(bpy.types.NodeTree):
    def export(self, exporter, bo, so):
        exported_nodes = exporter.exported_nodes.setdefault(self.name, set())
+       with exporter.report.indent():
            for node in self.nodes:
                if not (node.export_once and node.previously_exported(exporter)):
                    node.export(exporter, bo, so)
                    exported_nodes.add(node.name)

    def find_output(self, idname):
        for node in self.nodes:

13
korman/nodes/node_python.py

@@ -279,12 +279,12 @@ class PlasmaPythonFileNode(PlasmaVersionedNode, bpy.types.Node):
        # Check to see if we should pack this file
        if exporter.output.want_py_text(self.text_id):
-           exporter.report.msg("Including Python '{}' for package", self.filename, indent=3)
+           exporter.report.msg("Including Python '{}' for package", self.filename)
            exporter.output.add_python_mod(self.filename, text_id=self.text_id)

            # PFMs can have their own SDL...
            sdl_text = bpy.data.texts.get("{}.sdl".format(py_name), None)
            if sdl_text is not None:
-               exporter.report.msg("Including corresponding SDL '{}'", sdl_text.name, indent=3)
+               exporter.report.msg("Including corresponding SDL '{}'", sdl_text.name)
                exporter.output.add_sdl(sdl_text.name, text_id=sdl_text)

        # Handle exporting the Python Parameters

@@ -312,14 +312,13 @@ class PlasmaPythonFileNode(PlasmaVersionedNode, bpy.types.Node):
        # an animated lamp.
        if not bool(bo.users_group):
            for light in exporter.mgr.find_interfaces(plLightInfo, so):
-               exporter.report.msg("Marking RT light '{}' as animated due to usage in a Python File node",
-                                   so.key.name, indent=3)
+               exporter.report.msg(f"Marking RT light '{so.key.name}' as animated due to usage in a Python File node")
                light.setProperty(plLightInfo.kLPMovable, True)

    def _export_key_attrib(self, exporter, bo, so : plSceneObject, key : plKey, socket) -> None:
        if key is None:
            exporter.report.warn("Attribute '{}' didn't return a key and therefore will be unavailable to Python",
-                                self.id_data.name, socket.links[0].name, indent=3)
+                                self.id_data.name, socket.links[0].name)
            return

        key_type = _attrib_key_types[socket.attribute_type]

@@ -330,7 +329,7 @@ class PlasmaPythonFileNode(PlasmaVersionedNode, bpy.types.Node):
        if not good_key:
            exporter.report.warn("'{}' Node '{}' returned an unexpected key type '{}'",
                                 self.id_data.name, socket.links[0].from_node.name,
-                                plFactory.ClassName(key.type), indent=3)
+                                plFactory.ClassName(key.type))

        if isinstance(key.object, plSceneObject):
            self._export_ancillary_sceneobject(exporter, bo, key.object)

@@ -948,7 +947,7 @@ class PlasmaAttribTextureNode(idprops.IDPropMixin, PlasmaAttribNodeBase, bpy.typ
        remainder = sum((1 for i in result))
        if remainder > 1:
            exporter.report.warn("'{}.{}': Expected a single layer, but mapped to {}. Make the settings more specific.",
-                                self.id_data.name, self.path_from_id(), remainder + 1, indent=2)
+                                self.id_data.name, self.path_from_id(), remainder + 1)
        if result is not None:
            yield result
        else:

24
korman/operators/op_image.py

@@ -124,10 +124,10 @@ class PlasmaBuildCubeMapOperator(ImageOperator, bpy.types.Operator):
            face_path = filepath[:idx+1] + suffix + filepath[idx+3:]
            face_name = key[:-4].upper()
            if Path(face_path).is_file():
-               self._report.msg("Found face '{}': {}", face_name, face_path, indent=1)
+               self._report.msg("Found face '{}': {}", face_name, face_path)
                files.append(face_path)
            else:
-               self._report.warn("Using default face data for face '{}'", face_name, indent=1)
+               self._report.warn("Using default face data for face '{}'", face_name)
                files.append(None)
            self._report.progress_increment()
        return tuple(files)

@@ -226,14 +226,14 @@ class PlasmaBuildCubeMapOperator(ImageOperator, bpy.types.Operator):
        # Insert grumbling here about tuples being immutable...
        result_data = list(face_data)

+       with self._report.indent():
            for i in range(len(BLENDER_CUBE_MAP)):
                face_width, face_height = face_widths[i], face_heights[i]
                if face_width != min_width or face_height != min_height:
                    face_name = BLENDER_CUBE_MAP[i][:-4].upper()
                    self._report.msg("Resizing face '{}' from {}x{} to {}x{}", face_name,
-                                    face_width, face_height, min_width, min_height,
-                                    indent=1)
+                                    face_width, face_height, min_width, min_height)
                    result_data[i] = scale_image(face_data[i], face_width, face_height,
                                                 min_width, min_height)
                self._report.progress_increment()
        return min_width, min_height, tuple(result_data)

14
korman/properties/modifiers/anim.py

@@ -70,7 +70,8 @@ class PlasmaAnimationModifier(ActionModifier, PlasmaModifierProperties):
    def convert_object_animations(self, exporter, bo, so, anims: Optional[Iterable] = None):
        if not anims:
            anims = [self.subanimations.entire_animation]
-       aganims = list(self._export_ag_anims(exporter, bo, so, anims))
+       with exporter.report.indent():
+           aganims = list(self._export_ag_anims(exporter, bo, so, anims))

        # Defer creation of the private animation until after the converter has been executed.
        # Just because we have some FCurves doesn't mean they will produce anything particularly

@@ -100,8 +101,7 @@ class PlasmaAnimationModifier(ActionModifier, PlasmaModifierProperties):
            applicators = converter.convert_object_animations(bo, so, anim_name, start=start, end=end)
            if not applicators:
-               exporter.report.warn("Animation '{}' generated no applicators. Nothing will be exported.",
-                                    anim_name, indent=2)
+               exporter.report.warn(f"Animation '{anim_name}' generated no applicators. Nothing will be exported.")
                continue

            pClass = plAgeGlobalAnim if anim.sdl_var else plATCAnim

@@ -243,12 +243,12 @@ class PlasmaAnimationGroupModifier(ActionModifier, PlasmaModifierProperties):
                continue
            if not child_bo.plasma_object.has_animation_data:
                msg = "Animation Group '{}' specifies an object '{}' with no valid animation data. Ignoring..."
-               exporter.report.warn(msg, self.key_name, child_bo.name, indent=2)
+               exporter.report.warn(msg, self.key_name, child_bo.name)
                continue
            child_animation = child_bo.plasma_modifiers.animation
            if not child_animation.enabled:
                msg = "Animation Group '{}' specifies an object '{}' with no Plasma Animation modifier. Ignoring..."
-               exporter.report.warn(msg, self.key_name, child_bo.name, indent=2)
+               exporter.report.warn(msg, self.key_name, child_bo.name)
                continue
            child_agmod, child_agmaster = exporter.animation.get_anigraph_objects(bo=child_bo)
            msgfwd.addForwardKey(child_agmaster.key)

@@ -294,10 +294,10 @@ class PlasmaAnimationLoopModifier(ActionModifier, PlasmaModifierProperties):
            end = markers.get(loop.loop_end)
            if start is None:
                exporter.report.warn("Animation '{}' Loop '{}': Marker '{}' not found. This loop will not be exported".format(
-                   action.name, loop.loop_name, loop.loop_start), indent=2)
+                   action.name, loop.loop_name, loop.loop_start))
            if end is None:
                exporter.report.warn("Animation '{}' Loop '{}': Marker '{}' not found. This loop will not be exported".format(
-                   action.name, loop.loop_name, loop.loop_end), indent=2)
+                   action.name, loop.loop_name, loop.loop_end))
            if start is None or end is None:
                continue
            atcanim.setLoop(loop.loop_name, _convert_frame_time(start.frame), _convert_frame_time(end.frame))

8
korman/properties/modifiers/gui.py

@@ -126,11 +126,10 @@ class TranslationMixin:
    def export_localization(self, exporter):
        translations = [i for i in self.translations if i.text_id is not None]
        if not translations:
-           exporter.report.error("'{}': '{}' No content translations available. The localization will not be exported.",
-                                 self.id_data.name, self.bl_label, indent=1)
+           exporter.report.error(f"'{self.id_data.name}': '{self.bl_label}' No content translations available. The localization will not be exported.")
            return
        for i in translations:
-           exporter.locman.add_string(self.localization_set, self.key_name, i.language, i.text_id, indent=1)
+           exporter.locman.add_string(self.localization_set, self.key_name, i.language, i.text_id)

    def _get_translation(self):
        # Ensure there is always a default (read: English) translation available.

@@ -460,8 +459,7 @@ class PlasmaLinkingBookModifier(PlasmaModifierProperties, PlasmaModifierLogicWiz
    def pre_export(self, exporter, bo):
        if not self._check_version(exporter.mgr.getVer()):
            # We aren't needed here
-           exporter.report.port("Object '{}' has a LinkingBookMod not enabled for export to the selected engine. Skipping.",
-                                self.id_data.name, indent=2)
+           exporter.report.port(f"Object '{self.id_data.name}' has a LinkingBookMod not enabled for export to the selected engine. Skipping.")
            return

        # Auto-generate a six-foot cube region around the clickable if none was provided.

6
korman/properties/modifiers/physics.py

@@ -174,9 +174,9 @@ class PlasmaSubworld(PlasmaModifierProperties):
        # plCoordinateInterface::IGetRoot. Not really sure why this happens (nor do I care),
        # but we definitely don't want it to happen.
        if bo.type != "EMPTY":
-           exporter.report.warn("Subworld '{}' is attached to a '{}'--this should be an empty.", bo.name, bo.type, indent=1)
+           exporter.report.warn(f"Subworld '{bo.name}' is attached to a '{bo.type}'--this should be an empty.")
        if so.sim:
            if exporter.mgr.getVer() > pvPots:
-               exporter.report.port("Subworld '{}' has physics data--this will cause PotS to crash.", bo.name, indent=1)
+               exporter.report.port(f"Subworld '{bo.name}' has physics data--this will cause PotS to crash.")
            else:
-               raise ExportError("Subworld '{}' cannot have physics data (should be an empty).".format(bo.name))
+               raise ExportError(f"Subworld '{bo.name}' cannot have physics data (should be an empty).")

3
korman/properties/modifiers/region.py

@@ -340,8 +340,7 @@ class PlasmaSubworldRegion(PlasmaModifierProperties):
            from_name, from_type = get_log_text(from_sub, from_isded)
            to_name, to_type = get_log_text(to_sub, to_isded)
            exporter.report.msg("Transition from '{}' ({}) to '{}' ({})",
-                               from_name, from_type, to_name, to_type,
-                               indent=2)
+                               from_name, from_type, to_name, to_type)

        # I think the best solution here is to not worry about the excitement mentioned above.
        # If we encounter anything truly interesting, we can fix it in CWE more easily IMO because

17
korman/properties/modifiers/render.py

@@ -377,19 +377,17 @@ class PlasmaGrassShaderMod(PlasmaModifierProperties):
    def export(self, exporter, bo, so):
        if exporter.mgr.getVer() <= pvPots:
-           exporter.report.warn("Not supported on this version of Plasma", indent=3)
+           exporter.report.warn("Not supported on this version of Plasma")
            return
        else:
-           exporter.report.port("This will only function on MOUL and EOA", indent=3)
+           exporter.report.port("This will only function on MOUL and EOA")

        materials = exporter.mesh.material.get_materials(bo)
        if not materials:
-           exporter.report.warn("No materials are associated with this object, no grass shader exported!",
-                                indent=3)
+           exporter.report.warn("No materials are associated with this object, no grass shader exported!")
            return
        elif len(materials) > 1:
-           exporter.report.warn("Ah, a multiple material grass shader, eh. You like living dangerously...",
-                                indent=3)
+           exporter.report.warn("Ah, a multiple material grass shader, eh. You like living dangerously...")

        for material in materials:
            mod = exporter.mgr.find_create_object(plGrassShaderMod, so=so, name=material.name)

@@ -511,8 +509,7 @@ class PlasmaLightMapGen(idprops.IDPropMixin, PlasmaModifierProperties, PlasmaMod
        mat_mgr.export_prepared_image(owner=layer, image=lightmap_im,
                                      allowed_formats={"PNG", "JPG"},
                                      extension="hsm",
-                                     ephemeral=True,
-                                     indent=2)
+                                     ephemeral=True)

    @classmethod
    def _idprop_mapping(cls):

@@ -914,14 +911,14 @@ class PlasmaVisControl(idprops.IDPropObjectMixin, PlasmaModifierProperties):
        else:
            this_sv = bo.plasma_modifiers.softvolume
            if this_sv.enabled:
-               exporter.report.msg("[VisRegion] I'm a SoftVolume myself :)", indent=1)
+               exporter.report.msg("[VisRegion] I'm a SoftVolume myself :)")
                rgn.region = this_sv.get_key(exporter, so)
            else:
                if not self.soft_region:
                    raise ExportError("'{}': Visibility Control must have a Soft Volume selected".format(self.key_name))
                sv_bo = self.soft_region
                sv = sv_bo.plasma_modifiers.softvolume
-               exporter.report.msg("[VisRegion] SoftVolume '{}'", sv_bo.name, indent=1)
+               exporter.report.msg("[VisRegion] SoftVolume '{}'", sv_bo.name)
                if not sv.enabled:
                    raise ExportError("'{}': '{}' is not a SoftVolume".format(self.key_name, sv_bo.name))
                rgn.region = sv.get_key(exporter)

6
korman/properties/modifiers/sound.py

@@ -140,10 +140,10 @@ class PlasmaRandomSound(PlasmaModifierProperties):
surface_id = getattr(plPhysicalSndGroup, surface_name) surface_id = getattr(plPhysicalSndGroup, surface_name)
if surface_id in sounds: if surface_id in sounds:
exporter.report.warn("Overwriting physical {} surface '{}' ID:{}", exporter.report.warn("Overwriting physical {} surface '{}' ID:{}",
groupattr, surface_name, surface_id, indent=2) groupattr, surface_name, surface_id)
else: else:
exporter.report.msg("Got physical {} surface '{}' ID:{}", exporter.report.msg("Got physical {} surface '{}' ID:{}",
groupattr, surface_name, surface_id, indent=2) groupattr, surface_name, surface_id)
sounds[surface_id] = rndmod sounds[surface_id] = rndmod
# Keeps the LUT (or should that be lookup vector?) as small as possible # Keeps the LUT (or should that be lookup vector?) as small as possible
setattr(sndgroup, groupattr, [sounds.get(i) for i in range(max(sounds.keys()) + 1)]) setattr(sndgroup, groupattr, [sounds.get(i) for i in range(max(sounds.keys()) + 1)])
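The list comprehension on the line above turns a sparse {surface ID: modifier} mapping into the smallest dense list that still covers the highest used ID. A small worked example of that pattern, with made-up values:

# Illustrative values only; real keys come from plPhysicalSndGroup constants.
sounds = {0: "dirt_rnd_mod", 3: "metal_rnd_mod"}

# dict.get() fills the unused slots with None, so the list stays as short
# as possible while remaining indexable by surface ID.
lut = [sounds.get(i) for i in range(max(sounds.keys()) + 1)]
assert lut == ["dirt_rnd_mod", None, None, "metal_rnd_mod"]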
@@ -345,7 +345,7 @@ class PlasmaSound(idprops.IDPropMixin, bpy.types.PropertyGroup):
name = "Sfx-{}_{}".format(so.key.name, self._sound_name) name = "Sfx-{}_{}".format(so.key.name, self._sound_name)
else: else:
name = "Sfx-{}_{}:{}".format(so.key.name, self._sound_name, channel) name = "Sfx-{}_{}:{}".format(so.key.name, self._sound_name, channel)
exporter.report.msg("[{}] {}", pClass.__name__[2:], name, indent=1) exporter.report.msg("[{}] {}", pClass.__name__[2:], name)
sound = exporter.mgr.find_create_object(pClass, so=so, name=name) sound = exporter.mgr.find_create_object(pClass, so=so, name=name)
# If this object is a soft volume itself, we will use our own soft region. # If this object is a soft volume itself, we will use our own soft region.

2
korman/properties/modifiers/water.py

@@ -156,7 +156,7 @@ class PlasmaSwimRegion(idprops.IDPropObjectMixin, PlasmaModifierProperties, bpy.
# swimming surface should have a detector. m'kay? But still, we might want to make note # swimming surface should have a detector. m'kay? But still, we might want to make note
# of this situation. Just in case someone is like "WTF! Why am I not swimming?!?!1111111" # of this situation. Just in case someone is like "WTF! Why am I not swimming?!?!1111111"
# Because you need to have a detector, dummy. # Because you need to have a detector, dummy.
exporter.report.warn("Swimming Surface '{}' does not specify a detector region".format(bo.name), indent=2) exporter.report.warn(f"Swimming Surface '{bo.name}' does not specify a detector region")
def get_key(self, exporter, so=None): def get_key(self, exporter, so=None):
pClass = self._CURRENTS[self.current_type] pClass = self._CURRENTS[self.current_type]
