
Add rudimentary texture cache
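
Exports now write a sidecar ".ktc" texture cache next to the blend file (or the
export destination). The compressed level data for each exported image is keyed
by (image name, compression type) and reused on later exports whenever the source
image dimensions still match, skipping the OpenGL readback, POT resize, and DXT
compression. Every section of the cache file is tagged with a magic number and an
hsBitVector of field flags, so fields can be appended later without a version
bump. Lightmaps are flagged ephemeral and are never cached.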

pull/117/head
Adam Johnson committed 6 years ago
commit 654af0e56a
Signed by: Hoikas (GPG Key ID: 0B6515D6FF6F271E)
Changed files:
1. korman/exporter/convert.py (22 changes)
2. korman/exporter/image.py (300 changes)
3. korman/exporter/material.py (194 changes)
4. korman/operators/op_export.py (2 changes)
5. korman/properties/modifiers/render.py (1 change)

korman/exporter/convert.py (22 changes)

@@ -23,6 +23,7 @@ from . import animation
 from . import camera
 from . import explosions
 from . import etlight
+from . import image
 from . import logger
 from . import manager
 from . import mesh
@@ -54,6 +55,7 @@ class Exporter:
         self.animation = animation.AnimationConverter(self)
         self.sumfile = sumfile.SumFile()
         self.camera = camera.CameraConverter(self)
+        self.image = image.ImageCache(self)

         # Step 0.8: Init the progress mgr
         self.mesh.add_progress_presteps(self.report)
@@ -66,6 +68,7 @@ class Exporter:
         self.report.progress_add_step("Finalizing Plasma Logic")
         self.report.progress_add_step("Exporting Textures")
         self.report.progress_add_step("Composing Geometry")
+        self.report.progress_add_step("Saving Age Files")
         self.report.progress_start("EXPORTING AGE")

         # Step 0.9: Apply modifiers to all meshes temporarily.
@@ -101,7 +104,7 @@ class Exporter:
         self.mesh.finalize()

         # Step 5: FINALLY. Let's write the PRPs and crap.
-        self.mgr.save_age(Path(self._op.filepath))
+        self._save_age()

         # Step 5.1: Save out the export report.
         # If the export fails and this doesn't save, we have bigger problems than
@@ -340,3 +343,20 @@ class Exporter:
             if proc is not None:
                 proc(self, bl_obj, sceneobject)
             inc_progress()
+
+    def _save_age(self):
+        self.report.progress_advance()
+        self.mgr.save_age(Path(self._op.filepath))
+        self.image.save()
+
+    @property
+    def texcache_path(self):
+        age = bpy.context.scene.world.plasma_age
+        filepath = age.texcache_path
+        if not filepath or not Path(filepath).is_file():
+            filepath = bpy.context.blend_data.filepath
+            if not filepath:
+                filepath = self.filepath
+            filepath = str(Path(filepath).with_suffix(".ktc"))
+            age.texcache_path = filepath
+        return filepath
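
Note: the texcache_path property above prefers the path already stored on the age,
then derives one from the blend file, then from the export destination. A minimal
standalone sketch of that fallback chain (hypothetical names, not Korman API):

    from pathlib import Path

    def resolve_texcache_path(saved_path, blend_path, export_path):
        # Reuse a previously stored cache file if it still exists...
        if saved_path and Path(saved_path).is_file():
            return saved_path
        # ...otherwise derive a fresh .ktc path from the blend file,
        # falling back to the export destination.
        base = blend_path if blend_path else export_path
        return str(Path(base).with_suffix(".ktc"))

    # resolve_texcache_path("", "/ages/MyAge.blend", "/ages/out/MyAge.age")
    # -> "/ages/MyAge.ktc"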

korman/exporter/image.py (new file, 300 lines)

@@ -0,0 +1,300 @@
# This file is part of Korman.
#
# Korman is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Korman is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Korman. If not, see <http://www.gnu.org/licenses/>.

import enum
from pathlib import Path
from PyHSPlasma import *
import weakref

_HEADER_MAGICK = b"KTH\x00"
_INDEX_MAGICK = b"KTI\x00"
_DATA_MAGICK = b"KTC\x00"
_ENTRY_MAGICK = b"KTE\x00"
_IMAGE_MAGICK = b"KTT\x00"
_MIP_MAGICK = b"KTM\x00"

@enum.unique
class _HeaderBits(enum.IntEnum):
    last_export = 0
    index_pos = 1


@enum.unique
class _IndexBits(enum.IntEnum):
    image_count = 0


@enum.unique
class _EntryBits(enum.IntEnum):
    image_name = 0
    mip_levels = 1
    image_pos = 2
    compression = 3
    source_size = 4
    export_size = 5


class _CachedImage:
    def __init__(self):
        self.name = None
        self.mip_levels = 1
        self.data_pos = None
        self.image_data = None
        self.source_size = None
        self.export_size = None
        self.compression = None

    def __str__(self):
        return self.name

class ImageCache:
    def __init__(self, exporter):
        self._exporter = weakref.ref(exporter)
        self._images = {}
        self._read_stream = hsFileStream()
        self._stream_handles = 0

    def add_texture(self, key, num_levels, export_size, compression, data):
        if key.ephemeral:
            return
        image = _CachedImage()
        image.name = str(key)
        image.mip_levels = num_levels
        image.compression = compression
        image.source_size = key.image.size
        image.export_size = export_size
        image.image_data = data
        self._images[(image.name, compression)] = image

    def _compact(self):
        for key, image in self._images.copy().items():
            if image.image_data is None:
                self._images.pop(key)

    def __enter__(self):
        if self._stream_handles == 0:
            path = self._exporter().texcache_path
            if Path(path).is_file():
                self._read_stream.open(path, fmRead)
        self._stream_handles += 1
        return self

    def __exit__(self, type, value, tb):
        self._stream_handles -= 1
        if self._stream_handles == 0:
            self._read_stream.close()

    def get_from_texture(self, texture, compression):
        if texture.ephemeral:
            return None
        key = (str(texture), compression)
        cached_image = self._images.get(key)
        if cached_image is None:
            return None

        # ensure the texture key generally matches up with our copy of this image.
        # if not, a recache will likely be triggered implicitly.
        bl_image = texture.image
        if tuple(bl_image.size) != cached_image.source_size:
            return None

        # ensure the data has been loaded from the cache
        if cached_image.image_data is None:
            try:
                cached_image.image_data = tuple(self._read_image_data(cached_image, self._read_stream))
            except AssertionError:
                self._report.warn("Cached copy of '{}' is corrupt and will be discarded", cached_image.name, indent=2)
                self._images.pop(key)
                return None
        return cached_image

    def load(self):
        try:
            with self:
                self._read(self._read_stream)
        except AssertionError:
            self._report.warn("Texture Cache is corrupt and will be regenerated")
            self._images.clear()
    def _read(self, stream):
        if stream.size == 0:
            return
        stream.seek(0)
        assert stream.read(4) == _HEADER_MAGICK

        # if we use a bit vector to define our header structure, we can add
        # new fields without having to up the file version, trashing old
        # texture cache files... :)
        flags = hsBitVector()
        flags.read(stream)

        # ALWAYS ADD NEW FIELDS TO THE END OF THIS SECTION!!!!!!!
        if flags[_HeaderBits.last_export]:
            self.last_export = stream.readDouble()
        if flags[_HeaderBits.index_pos]:
            index_pos = stream.readInt()
            self._read_index(index_pos, stream)
    def _read_image_data(self, image, stream):
        if image.data_pos is None:
            return None

        assert stream.size > 0
        stream.seek(image.data_pos)
        assert stream.read(4) == _IMAGE_MAGICK

        # unused currently
        image_flags = hsBitVector()
        image_flags.read(stream)

        # given this is a generator, someone else might change our stream position
        # between iterations, so we'd best bookkeep the position
        pos = stream.pos

        for i in range(image.mip_levels):
            if stream.pos != pos:
                stream.seek(pos)
            assert stream.read(4) == _MIP_MAGICK

            # this should only ever be image data...
            # store your flags somewhere else!
            size = stream.readInt()
            data = stream.read(size)
            pos = stream.pos
            yield data

    def _read_index(self, index_pos, stream):
        stream.seek(index_pos)
        assert stream.read(4) == _INDEX_MAGICK

        # See above, can change the index format easily...
        flags = hsBitVector()
        flags.read(stream)

        # ALWAYS ADD NEW FIELDS TO THE END OF THIS SECTION!!!!!!!
        image_count = stream.readInt() if flags[_IndexBits.image_count] else 0

        # Here begins the image map
        assert stream.read(4) == _DATA_MAGICK
        for i in range(image_count):
            self._read_index_entry(stream)

    def _read_index_entry(self, stream):
        assert stream.read(4) == _ENTRY_MAGICK
        image = _CachedImage()

        # See above, can change the entry format easily...
        flags = hsBitVector()
        flags.read(stream)

        # ALWAYS ADD NEW FIELDS TO THE END OF THIS SECTION!!!!!!!
        if flags[_EntryBits.image_name]:
            image.name = stream.readSafeWStr()
        if flags[_EntryBits.mip_levels]:
            image.mip_levels = stream.readByte()
        if flags[_EntryBits.image_pos]:
            image.data_pos = stream.readInt()
        if flags[_EntryBits.compression]:
            image.compression = stream.readByte()
        if flags[_EntryBits.source_size]:
            image.source_size = (stream.readInt(), stream.readInt())
        if flags[_EntryBits.export_size]:
            image.export_size = (stream.readInt(), stream.readInt())

        # do we need to check for duplicate images?
        self._images[(image.name, image.compression)] = image
    @property
    def _report(self):
        return self._exporter().report

    def save(self):
        # TODO: add a way to preserve unused images for a brief period so we don't toss
        # already cached images that are only removed from the age temporarily...
        self._compact()

        # Assume all read operations are done (don't be within' my cache while you savin')
        assert self._stream_handles == 0

        with hsFileStream().open(self._exporter().texcache_path, fmWrite) as stream:
            self._write(stream)

    def _write(self, stream):
        flags = hsBitVector()
        flags[_HeaderBits.index_pos] = True

        stream.seek(0)
        stream.write(_HEADER_MAGICK)
        flags.write(stream)
        header_index_pos = stream.pos
        stream.writeInt(-1)

        for image in self._images.values():
            self._write_image_data(image, stream)

        # fix the index position
        index_pos = stream.pos
        self._write_index(stream)
        stream.seek(header_index_pos)
        stream.writeInt(index_pos)

    def _write_image_data(self, image, stream):
        # unused currently
        flags = hsBitVector()

        image.data_pos = stream.pos
        stream.write(_IMAGE_MAGICK)
        flags.write(stream)

        for i in image.image_data:
            stream.write(_MIP_MAGICK)
            stream.writeInt(len(i))
            stream.write(i)

    def _write_index(self, stream):
        flags = hsBitVector()
        flags[_IndexBits.image_count] = True

        pos = stream.pos
        stream.write(_INDEX_MAGICK)
        flags.write(stream)
        stream.writeInt(len(self._images))

        stream.write(_DATA_MAGICK)
        for image in self._images.values():
            self._write_index_entry(image, stream)
        return pos

    def _write_index_entry(self, image, stream):
        flags = hsBitVector()
        flags[_EntryBits.image_name] = True
        flags[_EntryBits.mip_levels] = True
        flags[_EntryBits.image_pos] = True
        flags[_EntryBits.compression] = True
        flags[_EntryBits.source_size] = True
        flags[_EntryBits.export_size] = True

        stream.write(_ENTRY_MAGICK)
        flags.write(stream)
        stream.writeSafeWStr(str(image))
        stream.writeByte(image.mip_levels)
        stream.writeInt(image.data_pos)
        stream.writeByte(image.compression)
        stream.writeInt(image.source_size[0])
        stream.writeInt(image.source_size[1])
        stream.writeInt(image.export_size[0])
        stream.writeInt(image.export_size[1])
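
Note: pieced together from _read and _write above, a .ktc file is a header (KTH),
a run of per-image data sections (KTT, each holding KTM-tagged mip levels), and a
trailing index (KTI, then a KTC block of KTE entries) whose position is patched
back into the header. A standalone sketch that peeks at the header, assuming
hsBitVector serializes as a uint32 word count followed by that many little-endian
uint32 words (illustrative only, not Korman API):

    import struct

    def peek_ktc_header(path):
        with open(path, "rb") as fp:
            assert fp.read(4) == b"KTH\x00", "bad header magic"
            nwords, = struct.unpack("<I", fp.read(4))
            words = struct.unpack("<{}I".format(nwords), fp.read(4 * nwords))
            flags = words[0] if words else 0
            if flags & (1 << 0):    # _HeaderBits.last_export
                print("last export:", struct.unpack("<d", fp.read(8))[0])
            if flags & (1 << 1):    # _HeaderBits.index_pos
                print("index pos:", struct.unpack("<I", fp.read(4))[0])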

korman/exporter/material.py (194 changes)

@@ -82,6 +82,7 @@ class _Texture:
         else:
             self.auto_ext = "hsm"
         self.extension = kwargs.get("extension", self.auto_ext)
+        self.ephemeral = kwargs.get("ephemeral", False)
         self.image = image

     def __eq__(self, other):
@@ -653,6 +654,7 @@ class MaterialConverter:
                        to use the image datablock extension, set this to None
             - indent: (optional) indentation level for log messages
                       default: 2
+            - ephemeral: (optional) never cache this image
         """
         owner = kwargs.pop("owner", None)
         indent = kwargs.pop("indent", 2)
@@ -672,91 +674,117 @@
         inc_progress = self._report.progress_increment
         mgr = self._mgr
-        for key, owners in self._pending.items():
-            name = str(key)
-            self._report.msg("\n[Mipmap '{}']", name)
-
-            image = key.image
-            oWidth, oHeight = image.size
-            if oWidth == 0 and oHeight == 0:
-                raise ExportError("Image '{}' could not be loaded.".format(image.name))
-
-            eWidth = helpers.ensure_power_of_two(oWidth)
-            eHeight = helpers.ensure_power_of_two(oHeight)
-            if (eWidth != oWidth) or (eHeight != oHeight):
-                self._report.msg("Image is not a POT ({}x{}) resizing to {}x{}",
-                                 oWidth, oHeight, eWidth, eHeight, indent=1)
-                self._resize_image(image, eWidth, eHeight)
-
-            # Now we try to use the pile of hints we were given to figure out what format to use
-            allowed_formats = key.allowed_formats
-            if key.mipmap:
-                compression = plBitmap.kDirectXCompression
-            elif "PNG" in allowed_formats and self._mgr.getVer() == pvMoul:
-                compression = plBitmap.kPNGCompression
-            elif "DDS" in allowed_formats:
-                compression = plBitmap.kDirectXCompression
-            elif "JPG" in allowed_formats:
-                compression = plBitmap.kJPEGCompression
-            elif "BMP" in allowed_formats:
-                compression = plBitmap.kUncompressed
-            else:
-                raise RuntimeError(allowed_formats)
-            dxt = plBitmap.kDXT5 if key.use_alpha or key.calc_alpha else plBitmap.kDXT1
-
-            # Grab the image data from OpenGL and stuff it into the plBitmap
-            helper = GLTexture(key)
-            with helper as glimage:
-                if compression == plBitmap.kDirectXCompression:
-                    numLevels = glimage.num_levels
-                    self._report.msg("Generating mip levels", indent=1)
-                    glimage.generate_mipmap()
-                else:
-                    numLevels = 1
-                    self._report.msg("Stuffing image data", indent=1)
-
-                # Non-DXT images are BGRA in Plasma
-                fmt = compression != plBitmap.kDirectXCompression
-
-                # Hold the uncompressed level data for now. We may have to make multiple copies of
-                # this mipmap for per-page textures :(
-                data = []
-                for i in range(numLevels):
-                    data.append(glimage.get_level_data(i, key.calc_alpha, fmt, report=self._report))
-
-            # Be a good citizen and reset the Blender Image to pre-futzing state
-            image.reload()
-
-            # Now we poke our new bitmap into the pending layers. Note that we have to do some funny
-            # business to account for per-page textures
-            pages = {}
-
-            self._report.msg("Adding to...", indent=1)
-            for owner_key in owners:
-                owner = owner_key.object
-                self._report.msg("[{} '{}']", owner.ClassName()[2:], owner_key.name, indent=2)
-                page = mgr.get_textures_page(owner_key) # Layer's page or Textures.prp
-
-                # If we haven't created this plMipmap in the page (either layer's page or Textures.prp),
-                # then we need to do that and stuff the level data. This is a little tedious, but we
-                # need to be careful to manage our resources correctly
-                if page not in pages:
-                    mipmap = plMipmap(name=name, width=eWidth, height=eHeight, numLevels=numLevels,
-                                      compType=compression, format=plBitmap.kRGB8888, dxtLevel=dxt)
-                    helper.store_in_mipmap(mipmap, data, compression)
-                    mgr.AddObject(page, mipmap)
-                    pages[page] = mipmap
-                else:
-                    mipmap = pages[page]
-
-                if isinstance(owner, plLayerInterface):
-                    owner.texture = mipmap.key
-                elif isinstance(owner, plImageLibMod):
-                    owner.addImage(mipmap.key)
-                else:
-                    raise RuntimeError(owner.ClassName())
-
-            inc_progress()
+        # This with statement causes the texture cache to hold open a
+        # read stream for the cache file, preventing spurious open-close
+        # spin washing during this tight loop. Note that the cache still
+        # has to actually be loaded ^_^
+        with self._texcache as texcache:
+            texcache.load()
+
+            for key, owners in self._pending.items():
+                name = str(key)
+                self._report.msg("\n[Mipmap '{}']", name)
+
+                image = key.image
+                oWidth, oHeight = image.size
+                if oWidth == 0 and oHeight == 0:
+                    raise ExportError("Image '{}' could not be loaded.".format(image.name))
+
+                # Now we try to use the pile of hints we were given to figure out what format to use
+                allowed_formats = key.allowed_formats
+                if key.mipmap:
+                    compression = plBitmap.kDirectXCompression
+                elif "PNG" in allowed_formats and self._mgr.getVer() == pvMoul:
+                    compression = plBitmap.kPNGCompression
+                elif "DDS" in allowed_formats:
+                    compression = plBitmap.kDirectXCompression
+                elif "JPG" in allowed_formats:
+                    compression = plBitmap.kJPEGCompression
+                elif "BMP" in allowed_formats:
+                    compression = plBitmap.kUncompressed
+                else:
+                    raise RuntimeError(allowed_formats)
+                dxt = plBitmap.kDXT5 if key.use_alpha or key.calc_alpha else plBitmap.kDXT1
+
+                # Mayhaps we have a cached version of this that has already been exported
+                cached_image = texcache.get_from_texture(key, compression)
+
+                if cached_image is None:
+                    eWidth = helpers.ensure_power_of_two(oWidth)
+                    eHeight = helpers.ensure_power_of_two(oHeight)
+                    if (eWidth != oWidth) or (eHeight != oHeight):
+                        self._report.msg("Image is not a POT ({}x{}) resizing to {}x{}",
+                                         oWidth, oHeight, eWidth, eHeight, indent=1)
+                        self._resize_image(image, eWidth, eHeight)
+
+                    # Grab the image data from OpenGL and stuff it into the plBitmap
+                    helper = GLTexture(key)
+                    with helper as glimage:
+                        if compression == plBitmap.kDirectXCompression:
+                            numLevels = glimage.num_levels
+                            self._report.msg("Generating mip levels", indent=1)
+                            glimage.generate_mipmap()
+                        else:
+                            numLevels = 1
+                            self._report.msg("Compressing single level", indent=1)
+
+                        # Non-DXT images are BGRA in Plasma
+                        fmt = compression != plBitmap.kDirectXCompression
+
+                        # Hold the uncompressed level data for now. We may have to make multiple copies of
+                        # this mipmap for per-page textures :(
+                        data = []
+                        for i in range(numLevels):
+                            data.append(glimage.get_level_data(i, key.calc_alpha, fmt, report=self._report))
+
+                    # Be a good citizen and reset the Blender Image to pre-futzing state
+                    image.reload()
+
+                    # If this is a DXT-compressed mipmap, we need to use a temporary mipmap
+                    # to do the compression. We'll then steal the data from it.
+                    if compression == plBitmap.kDirectXCompression:
+                        mipmap = plMipmap(name=name, width=eWidth, height=eHeight, numLevels=numLevels,
+                                          compType=compression, format=plBitmap.kRGB8888, dxtLevel=dxt)
+                        for i in range(numLevels):
+                            mipmap.CompressImage(i, data[i])
+                            data[i] = mipmap.getLevel(i)
+                    texcache.add_texture(key, numLevels, (eWidth, eHeight), compression, data)
+                else:
+                    eWidth, eHeight = cached_image.export_size
+                    data = cached_image.image_data
+                    numLevels = cached_image.mip_levels
+
+                # Now we poke our new bitmap into the pending layers. Note that we have to do some funny
+                # business to account for per-page textures
+                pages = {}
+
+                self._report.msg("Adding to...", indent=1)
+                for owner_key in owners:
+                    owner = owner_key.object
+                    self._report.msg("[{} '{}']", owner.ClassName()[2:], owner_key.name, indent=2)
+                    page = mgr.get_textures_page(owner_key) # Layer's page or Textures.prp
+
+                    # If we haven't created this plMipmap in the page (either layer's page or Textures.prp),
+                    # then we need to do that and stuff the level data. This is a little tedious, but we
+                    # need to be careful to manage our resources correctly
+                    if page not in pages:
+                        mipmap = plMipmap(name=name, width=eWidth, height=eHeight, numLevels=numLevels,
+                                          compType=compression, format=plBitmap.kRGB8888, dxtLevel=dxt)
+                        for i, buf in enumerate(data):
+                            mipmap.setLevel(i, buf)
+                        mgr.AddObject(page, mipmap)
+                        pages[page] = mipmap
+                    else:
+                        mipmap = pages[page]
+
+                    if isinstance(owner, plLayerInterface):
+                        owner.texture = mipmap.key
+                    elif isinstance(owner, plImageLibMod):
+                        owner.addImage(mipmap.key)
+                    else:
+                        raise RuntimeError(owner.ClassName())
+
+                inc_progress()

     def get_materials(self, bo):
         return self._obj2mat.get(bo, [])
@@ -843,3 +871,7 @@ class MaterialConverter:
         self._alphatest[image] = result
         return result
+
+    @property
+    def _texcache(self):
+        return self._exporter().image
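
Note: the net effect of the material.py changes is a fast path around the
expensive OpenGL readback and DXT compression. Roughly (a simplified sketch of
the export loop above; helper names are hypothetical):

    for key, owners in pending.items():
        compression = pick_compression(key)                 # format hints, as above
        cached = texcache.get_from_texture(key, compression)
        if cached is None:
            levels = read_levels_from_opengl(key)           # POT resize + mip gen
            if compression == plBitmap.kDirectXCompression:
                levels = dxt_compress(levels)               # via a scratch plMipmap
            texcache.add_texture(key, len(levels), export_size, compression, levels)
        else:
            levels = cached.image_data                      # reuse compressed bytes
        stuff_levels_into_pages(levels, owners)             # one plMipmap per page

Because the already-compressed level data is what gets cached, a cache hit also
skips plMipmap.CompressImage on re-export, not just the GL readback.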

korman/operators/op_export.py (2 changes)

@@ -48,6 +48,8 @@ class ExportOperator(bpy.types.Operator):
         "show_console": (BoolProperty, {"name": "Display Log Console",
                                         "description": "Forces the Blender System Console open during the export",
                                         "default": True}),
+
+        "texcache_path": (StringProperty, {"name": "Texture Cache"}),
     }

     # This wigs out and very bad things happen if it's not directly on the operator...

korman/properties/modifiers/render.py (1 change)

@@ -215,6 +215,7 @@ class PlasmaLightMapGen(idprops.IDPropMixin, PlasmaModifierProperties):
             # Mmm... cheating
             mat_mgr.export_prepared_image(owner=layer, image=lightmap_im,
                                           allowed_formats={"PNG", "DDS"},
+                                          ephemeral=True,
                                           indent=2)

     @classmethod
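
Note: lightmaps are baked fresh on every export, so caching them would only churn
the .ktc file; passing ephemeral=True keeps them out of the cache entirely (see
_Texture.ephemeral and ImageCache.add_texture above).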
