From 654af0e56a087cf7cebba50b989209262f153d58 Mon Sep 17 00:00:00 2001 From: Adam Johnson Date: Sun, 2 Sep 2018 17:57:05 -0400 Subject: [PATCH 1/5] Add rudimentary texture cache --- korman/exporter/convert.py | 22 +- korman/exporter/image.py | 300 ++++++++++++++++++++++++++ korman/exporter/material.py | 194 ++++++++++------- korman/operators/op_export.py | 2 + korman/properties/modifiers/render.py | 1 + 5 files changed, 437 insertions(+), 82 deletions(-) create mode 100644 korman/exporter/image.py diff --git a/korman/exporter/convert.py b/korman/exporter/convert.py index 558d9f6..f0647fb 100644 --- a/korman/exporter/convert.py +++ b/korman/exporter/convert.py @@ -23,6 +23,7 @@ from . import animation from . import camera from . import explosions from . import etlight +from . import image from . import logger from . import manager from . import mesh @@ -54,6 +55,7 @@ class Exporter: self.animation = animation.AnimationConverter(self) self.sumfile = sumfile.SumFile() self.camera = camera.CameraConverter(self) + self.image = image.ImageCache(self) # Step 0.8: Init the progress mgr self.mesh.add_progress_presteps(self.report) @@ -66,6 +68,7 @@ class Exporter: self.report.progress_add_step("Finalizing Plasma Logic") self.report.progress_add_step("Exporting Textures") self.report.progress_add_step("Composing Geometry") + self.report.progress_add_step("Saving Age Files") self.report.progress_start("EXPORTING AGE") # Step 0.9: Apply modifiers to all meshes temporarily. @@ -101,7 +104,7 @@ class Exporter: self.mesh.finalize() # Step 5: FINALLY. Let's write the PRPs and crap. - self.mgr.save_age(Path(self._op.filepath)) + self._save_age() # Step 5.1: Save out the export report. 
# If the export fails and this doesn't save, we have bigger problems than @@ -340,3 +343,20 @@ class Exporter: if proc is not None: proc(self, bl_obj, sceneobject) inc_progress() + + def _save_age(self): + self.report.progress_advance() + self.mgr.save_age(Path(self._op.filepath)) + self.image.save() + + @property + def texcache_path(self): + age = bpy.context.scene.world.plasma_age + filepath = age.texcache_path + if not filepath or not Path(filepath).is_file(): + filepath = bpy.context.blend_data.filepath + if not filepath: + filepath = self.filepath + filepath = str(Path(filepath).with_suffix(".ktc")) + age.texcache_path = filepath + return filepath diff --git a/korman/exporter/image.py b/korman/exporter/image.py new file mode 100644 index 0000000..fff107e --- /dev/null +++ b/korman/exporter/image.py @@ -0,0 +1,300 @@ +# This file is part of Korman. +# +# Korman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Korman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Korman. If not, see . 
+ +import enum +from pathlib import Path +from PyHSPlasma import * +import weakref + +_HEADER_MAGICK = b"KTH\x00" +_INDEX_MAGICK = b"KTI\x00" +_DATA_MAGICK = b"KTC\x00" +_ENTRY_MAGICK = b"KTE\x00" +_IMAGE_MAGICK = b"KTT\x00" +_MIP_MAGICK = b"KTM\x00" + +@enum.unique +class _HeaderBits(enum.IntEnum): + last_export = 0 + index_pos = 1 + + +@enum.unique +class _IndexBits(enum.IntEnum): + image_count = 0 + + +@enum.unique +class _EntryBits(enum.IntEnum): + image_name = 0 + mip_levels = 1 + image_pos = 2 + compression = 3 + source_size = 4 + export_size = 5 + + +class _CachedImage: + def __init__(self): + self.name = None + self.mip_levels = 1 + self.data_pos = None + self.image_data = None + self.source_size = None + self.export_size = None + self.compression = None + + def __str__(self): + return self.name + + +class ImageCache: + def __init__(self, exporter): + self._exporter = weakref.ref(exporter) + self._images = {} + self._read_stream = hsFileStream() + self._stream_handles = 0 + + def add_texture(self, key, num_levels, export_size, compression, data): + if key.ephemeral: + return + image = _CachedImage() + image.name = str(key) + image.mip_levels = num_levels + image.compression = compression + image.source_size = key.image.size + image.export_size = export_size + image.image_data = data + self._images[(image.name, compression)] = image + + def _compact(self): + for key, image in self._images.copy().items(): + if image.image_data is None: + self._images.pop(key) + + def __enter__(self): + if self._stream_handles == 0: + path = self._exporter().texcache_path + if Path(path).is_file(): + self._read_stream.open(path, fmRead) + self._stream_handles += 1 + return self + + def __exit__(self, type, value, tb): + self._stream_handles -= 1 + if self._stream_handles == 0: + self._read_stream.close() + + def get_from_texture(self, texture, compression): + if texture.ephemeral: + return None + + key = (str(texture), compression) + cached_image = self._images.get(key) + if 
cached_image is None: + return None + + # ensure the texture key generally matches up with our copy of this image. + # if not, a recache will likely be triggered implicitly. + bl_image = texture.image + if tuple(bl_image.size) != cached_image.source_size: + return None + + # ensure the data has been loaded from the cache + if cached_image.image_data is None: + try: + cached_image.image_data = tuple(self._read_image_data(cached_image, self._read_stream)) + except AssertionError: + self._report.warn("Cached copy of '{}' is corrupt and will be discarded", cached_image.name, indent=2) + self._images.pop(key) + return None + return cached_image + + def load(self): + try: + with self: + self._read(self._read_stream) + except AssertionError: + self._report.warn("Texture Cache is corrupt and will be regenerated") + self._images.clear() + + def _read(self, stream): + if stream.size == 0: + return + stream.seek(0) + assert stream.read(4) == _HEADER_MAGICK + + # if we use a bit vector to define our header structure, we can add + # new fields without having to up the file version, trashing old + # texture cache files... :) + flags = hsBitVector() + flags.read(stream) + + # ALWAYS ADD NEW FIELDS TO THE END OF THIS SECTION!!!!!!!
+ if flags[_HeaderBits.last_export]: + self.last_export = stream.readDouble() + if flags[_HeaderBits.index_pos]: + index_pos = stream.readInt() + self._read_index(index_pos, stream) + + def _read_image_data(self, image, stream): + if image.data_pos is None: + return None + + assert stream.size > 0 + stream.seek(image.data_pos) + assert stream.read(4) == _IMAGE_MAGICK + + # unused currently + image_flags = hsBitVector() + image_flags.read(stream) + + # given this is a generator, someone else might change our stream position + # between iterations, so we'd best bookkeep the position + pos = stream.pos + + for i in range(image.mip_levels): + if stream.pos != pos: + stream.seek(pos) + assert stream.read(4) == _MIP_MAGICK + + # this should only ever be image data... + # store your flags somewhere else! + size = stream.readInt() + data = stream.read(size) + pos = stream.pos + yield data + + def _read_index(self, index_pos, stream): + stream.seek(index_pos) + assert stream.read(4) == _INDEX_MAGICK + + # See above, can change the index format easily... + flags = hsBitVector() + flags.read(stream) + + # ALWAYS ADD NEW FIELDS TO THE END OF THIS SECTION!!!!!!! + image_count = stream.readInt() if flags[_IndexBits.image_count] else 0 + + # Here begins the image map + assert stream.read(4) == _DATA_MAGICK + for i in range(image_count): + self._read_index_entry(stream) + + def _read_index_entry(self, stream): + assert stream.read(4) == _ENTRY_MAGICK + image = _CachedImage() + + # See above, can change the entry format easily... + flags = hsBitVector() + flags.read(stream) + + # ALWAYS ADD NEW FIELDS TO THE END OF THIS SECTION!!!!!!! 
+ if flags[_EntryBits.image_name]: + image.name = stream.readSafeWStr() + if flags[_EntryBits.mip_levels]: + image.mip_levels = stream.readByte() + if flags[_EntryBits.image_pos]: + image.data_pos = stream.readInt() + if flags[_EntryBits.compression]: + image.compression = stream.readByte() + if flags[_EntryBits.source_size]: + image.source_size = (stream.readInt(), stream.readInt()) + if flags[_EntryBits.export_size]: + image.export_size = (stream.readInt(), stream.readInt()) + + # do we need to check for duplicate images? + self._images[(image.name, image.compression)] = image + + @property + def _report(self): + return self._exporter().report + + def save(self): + # TODO: add a way to preserve unused images for a brief period so we don't toss + # already cached images that are only removed from the age temporarily... + self._compact() + + # Assume all read operations are done (don't be within' my cache while you savin') + assert self._stream_handles == 0 + + with hsFileStream().open(self._exporter().texcache_path, fmWrite) as stream: + self._write(stream) + + def _write(self, stream): + flags = hsBitVector() + flags[_HeaderBits.index_pos] = True + + stream.seek(0) + stream.write(_HEADER_MAGICK) + flags.write(stream) + header_index_pos = stream.pos + stream.writeInt(-1) + + for image in self._images.values(): + self._write_image_data(image, stream) + + # fix the index position + index_pos = stream.pos + self._write_index(stream) + stream.seek(header_index_pos) + stream.writeInt(index_pos) + + def _write_image_data(self, image, stream): + # unused currently + flags = hsBitVector() + + image.data_pos = stream.pos + stream.write(_IMAGE_MAGICK) + flags.write(stream) + + for i in image.image_data: + stream.write(_MIP_MAGICK) + stream.writeInt(len(i)) + stream.write(i) + + def _write_index(self, stream): + flags = hsBitVector() + flags[_IndexBits.image_count] = True + + pos = stream.pos + stream.write(_INDEX_MAGICK) + flags.write(stream) + 
stream.writeInt(len(self._images)) + + stream.write(_DATA_MAGICK) + for image in self._images.values(): + self._write_index_entry(image, stream) + return pos + + def _write_index_entry(self, image, stream): + flags = hsBitVector() + flags[_EntryBits.image_name] = True + flags[_EntryBits.mip_levels] = True + flags[_EntryBits.image_pos] = True + flags[_EntryBits.compression] = True + flags[_EntryBits.source_size] = True + flags[_EntryBits.export_size] = True + + stream.write(_ENTRY_MAGICK) + flags.write(stream) + stream.writeSafeWStr(str(image)) + stream.writeByte(image.mip_levels) + stream.writeInt(image.data_pos) + stream.writeByte(image.compression) + stream.writeInt(image.source_size[0]) + stream.writeInt(image.source_size[1]) + stream.writeInt(image.export_size[0]) + stream.writeInt(image.export_size[1]) diff --git a/korman/exporter/material.py b/korman/exporter/material.py index 2bf9927..96f31ef 100644 --- a/korman/exporter/material.py +++ b/korman/exporter/material.py @@ -82,6 +82,7 @@ class _Texture: else: self.auto_ext = "hsm" self.extension = kwargs.get("extension", self.auto_ext) + self.ephemeral = kwargs.get("ephemeral", False) self.image = image def __eq__(self, other): @@ -653,6 +654,7 @@ class MaterialConverter: to use the image datablock extension, set this to None - indent: (optional) indentation level for log messages default: 2 + - ephemeral: (optional) never cache this image """ owner = kwargs.pop("owner", None) indent = kwargs.pop("indent", 2) @@ -672,91 +674,117 @@ class MaterialConverter: inc_progress = self._report.progress_increment mgr = self._mgr - for key, owners in self._pending.items(): - name = str(key) - self._report.msg("\n[Mipmap '{}']", name) - - image = key.image - oWidth, oHeight = image.size - if oWidth == 0 and oHeight == 0: - raise ExportError("Image '{}' could not be loaded.".format(image.name)) - - eWidth = helpers.ensure_power_of_two(oWidth) - eHeight = helpers.ensure_power_of_two(oHeight) - if (eWidth != oWidth) or (eHeight 
!= oHeight): - self._report.msg("Image is not a POT ({}x{}) resizing to {}x{}", - oWidth, oHeight, eWidth, eHeight, indent=1) - self._resize_image(image, eWidth, eHeight) - - # Now we try to use the pile of hints we were given to figure out what format to use - allowed_formats = key.allowed_formats - if key.mipmap: - compression = plBitmap.kDirectXCompression - elif "PNG" in allowed_formats and self._mgr.getVer() == pvMoul: - compression = plBitmap.kPNGCompression - elif "DDS" in allowed_formats: - compression = plBitmap.kDirectXCompression - elif "JPG" in allowed_formats: - compression = plBitmap.kJPEGCompression - elif "BMP" in allowed_formats: - compression = plBitmap.kUncompressed - else: - raise RuntimeError(allowed_formats) - dxt = plBitmap.kDXT5 if key.use_alpha or key.calc_alpha else plBitmap.kDXT1 - - # Grab the image data from OpenGL and stuff it into the plBitmap - helper = GLTexture(key) - with helper as glimage: - if compression == plBitmap.kDirectXCompression: - numLevels = glimage.num_levels - self._report.msg("Generating mip levels", indent=1) - glimage.generate_mipmap() + # This with statement causes the texture cache to hold open a + # read stream for the cache file, preventing spurious open-close + # spin washing during this tight loop. 
Note that the cache still + # has to actually be loaded ^_^ + with self._texcache as texcache: + texcache.load() + + for key, owners in self._pending.items(): + name = str(key) + self._report.msg("\n[Mipmap '{}']", name) + + image = key.image + oWidth, oHeight = image.size + if oWidth == 0 and oHeight == 0: + raise ExportError("Image '{}' could not be loaded.".format(image.name)) + + # Now we try to use the pile of hints we were given to figure out what format to use + allowed_formats = key.allowed_formats + if key.mipmap: + compression = plBitmap.kDirectXCompression + elif "PNG" in allowed_formats and self._mgr.getVer() == pvMoul: + compression = plBitmap.kPNGCompression + elif "DDS" in allowed_formats: + compression = plBitmap.kDirectXCompression + elif "JPG" in allowed_formats: + compression = plBitmap.kJPEGCompression + elif "BMP" in allowed_formats: + compression = plBitmap.kUncompressed else: - numLevels = 1 - self._report.msg("Stuffing image data", indent=1) - - # Non-DXT images are BGRA in Plasma - fmt = compression != plBitmap.kDirectXCompression - - # Hold the uncompressed level data for now. We may have to make multiple copies of - # this mipmap for per-page textures :( - data = [] - for i in range(numLevels): - data.append(glimage.get_level_data(i, key.calc_alpha, fmt, report=self._report)) - - # Be a good citizen and reset the Blender Image to pre-futzing state - image.reload() - - # Now we poke our new bitmap into the pending layers. Note that we have to do some funny - # business to account for per-page textures - pages = {} - - self._report.msg("Adding to...", indent=1) - for owner_key in owners: - owner = owner_key.object - self._report.msg("[{} '{}']", owner.ClassName()[2:], owner_key.name, indent=2) - page = mgr.get_textures_page(owner_key) # Layer's page or Textures.prp - - # If we haven't created this plMipmap in the page (either layer's page or Textures.prp), - # then we need to do that and stuff the level data. 
This is a little tedious, but we - # need to be careful to manage our resources correctly - if page not in pages: - mipmap = plMipmap(name=name, width=eWidth, height=eHeight, numLevels=numLevels, - compType=compression, format=plBitmap.kRGB8888, dxtLevel=dxt) - helper.store_in_mipmap(mipmap, data, compression) - mgr.AddObject(page, mipmap) - pages[page] = mipmap + raise RuntimeError(allowed_formats) + dxt = plBitmap.kDXT5 if key.use_alpha or key.calc_alpha else plBitmap.kDXT1 + + # Mayhaps we have a cached version of this that has already been exported + cached_image = texcache.get_from_texture(key, compression) + + if cached_image is None: + eWidth = helpers.ensure_power_of_two(oWidth) + eHeight = helpers.ensure_power_of_two(oHeight) + if (eWidth != oWidth) or (eHeight != oHeight): + self._report.msg("Image is not a POT ({}x{}) resizing to {}x{}", + oWidth, oHeight, eWidth, eHeight, indent=1) + self._resize_image(image, eWidth, eHeight) + + # Grab the image data from OpenGL and stuff it into the plBitmap + helper = GLTexture(key) + with helper as glimage: + if compression == plBitmap.kDirectXCompression: + numLevels = glimage.num_levels + self._report.msg("Generating mip levels", indent=1) + glimage.generate_mipmap() + else: + numLevels = 1 + self._report.msg("Compressing single level", indent=1) + + # Non-DXT images are BGRA in Plasma + fmt = compression != plBitmap.kDirectXCompression + + # Hold the uncompressed level data for now. We may have to make multiple copies of + # this mipmap for per-page textures :( + data = [] + for i in range(numLevels): + data.append(glimage.get_level_data(i, key.calc_alpha, fmt, report=self._report)) + + # Be a good citizen and reset the Blender Image to pre-futzing state + image.reload() + + # If this is a DXT-compressed mipmap, we need to use a temporary mipmap + # to do the compression. We'll then steal the data from it. 
+ if compression == plBitmap.kDirectXCompression: + mipmap = plMipmap(name=name, width=eWidth, height=eHeight, numLevels=numLevels, + compType=compression, format=plBitmap.kRGB8888, dxtLevel=dxt) + for i in range(numLevels): + mipmap.CompressImage(i, data[i]) + data[i] = mipmap.getLevel(i) + texcache.add_texture(key, numLevels, (eWidth, eHeight), compression, data) else: - mipmap = pages[page] + eWidth, eHeight = cached_image.export_size + data = cached_image.image_data + numLevels = cached_image.mip_levels + + # Now we poke our new bitmap into the pending layers. Note that we have to do some funny + # business to account for per-page textures + pages = {} + + self._report.msg("Adding to...", indent=1) + for owner_key in owners: + owner = owner_key.object + self._report.msg("[{} '{}']", owner.ClassName()[2:], owner_key.name, indent=2) + page = mgr.get_textures_page(owner_key) # Layer's page or Textures.prp + + # If we haven't created this plMipmap in the page (either layer's page or Textures.prp), + # then we need to do that and stuff the level data. 
This is a little tedious, but we + # need to be careful to manage our resources correctly + if page not in pages: + mipmap = plMipmap(name=name, width=eWidth, height=eHeight, numLevels=numLevels, + compType=compression, format=plBitmap.kRGB8888, dxtLevel=dxt) + for i, buf in enumerate(data): + mipmap.setLevel(i, buf) + mgr.AddObject(page, mipmap) + pages[page] = mipmap + else: + mipmap = pages[page] - if isinstance(owner, plLayerInterface): - owner.texture = mipmap.key - elif isinstance(owner, plImageLibMod): - owner.addImage(mipmap.key) - else: - raise RuntimeError(owner.ClassName()) + if isinstance(owner, plLayerInterface): + owner.texture = mipmap.key + elif isinstance(owner, plImageLibMod): + owner.addImage(mipmap.key) + else: + raise RuntimeError(owner.ClassName()) - inc_progress() + inc_progress() def get_materials(self, bo): return self._obj2mat.get(bo, []) @@ -843,3 +871,7 @@ class MaterialConverter: self._alphatest[image] = result return result + + @property + def _texcache(self): + return self._exporter().image diff --git a/korman/operators/op_export.py b/korman/operators/op_export.py index 27b19e1..d06def1 100644 --- a/korman/operators/op_export.py +++ b/korman/operators/op_export.py @@ -48,6 +48,8 @@ class ExportOperator(bpy.types.Operator): "show_console": (BoolProperty, {"name": "Display Log Console", "description": "Forces the Blender System Console open during the export", "default": True}), + + "texcache_path": (StringProperty, {"name": "Texture Cache"}), } # This wigs out and very bad things happen if it's not directly on the operator... diff --git a/korman/properties/modifiers/render.py b/korman/properties/modifiers/render.py index 5fd2786..00cf8a0 100644 --- a/korman/properties/modifiers/render.py +++ b/korman/properties/modifiers/render.py @@ -215,6 +215,7 @@ class PlasmaLightMapGen(idprops.IDPropMixin, PlasmaModifierProperties): # Mmm... 
cheating mat_mgr.export_prepared_image(owner=layer, image=lightmap_im, allowed_formats={"PNG", "DDS"}, + ephemeral=True, indent=2) @classmethod From 2a49634e434c600272c4018bb16c5de64ce16b57 Mon Sep 17 00:00:00 2001 From: Adam Johnson Date: Sun, 2 Sep 2018 17:59:46 -0400 Subject: [PATCH 2/5] Update korlib for texture caching Previously, the C korlib used a custom buffer class to avoid a memcpy operation. However, pyMipmap expects binary string (PyBytes) objects, so this changes the code to use them. Future work would be to continue using PyBytes and removing the copy. For now, there are bigger fish to fry... --- korlib/CMakeLists.txt | 2 - korlib/buffer.cpp | 111 --------------------------------------- korlib/buffer.h | 37 ------------- korlib/module.cpp | 2 - korlib/texture.cpp | 38 +------------- korman/korlib/texture.py | 5 -- 6 files changed, 2 insertions(+), 193 deletions(-) delete mode 100644 korlib/buffer.cpp delete mode 100644 korlib/buffer.h diff --git a/korlib/CMakeLists.txt b/korlib/CMakeLists.txt index a00a011..b0fc144 100644 --- a/korlib/CMakeLists.txt +++ b/korlib/CMakeLists.txt @@ -18,7 +18,6 @@ endif() # Da files set(korlib_HEADERS - buffer.h bumpmap.h korlib.h sound.h @@ -26,7 +25,6 @@ set(korlib_HEADERS ) set(korlib_SOURCES - buffer.cpp bumpmap.cpp module.cpp sound.cpp diff --git a/korlib/buffer.cpp b/korlib/buffer.cpp deleted file mode 100644 index 120f526..0000000 --- a/korlib/buffer.cpp +++ /dev/null @@ -1,111 +0,0 @@ -/* This file is part of Korman. - * - * Korman is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * Korman is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. 
- * - * You should have received a copy of the GNU General Public License - * along with Korman. If not, see . - */ - -#include "buffer.h" - -extern "C" { - -static void pyBuffer_dealloc(pyBuffer* self) { - delete[] self->m_buffer; - Py_TYPE(self)->tp_free((PyObject*)self); -} - -static PyObject* pyBuffer_new(PyTypeObject* type, PyObject* args, PyObject* kwds) { - PyErr_SetString(PyExc_RuntimeError, "Buffers cannot be created by mere mortals"); - return NULL; -} - -PyTypeObject pyBuffer_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_korlib.Buffer", /* tp_name */ - sizeof(pyBuffer), /* tp_basicsize */ - 0, /* tp_itemsize */ - - (destructor)pyBuffer_dealloc, /* tp_dealloc */ - NULL, /* tp_print */ - NULL, /* tp_getattr */ - NULL, /* tp_setattr */ - NULL, /* tp_compare */ - NULL, /* tp_repr */ - NULL, /* tp_as_number */ - NULL, /* tp_as_sequence */ - NULL, /* tp_as_mapping */ - NULL, /* tp_hash */ - NULL, /* tp_call */ - NULL, /* tp_str */ - NULL, /* tp_getattro */ - NULL, /* tp_setattro */ - NULL, /* tp_as_buffer */ - - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ - "Buffer", /* tp_doc */ - - NULL, /* tp_traverse */ - NULL, /* tp_clear */ - NULL, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - NULL, /* tp_iter */ - NULL, /* tp_iternext */ - - NULL, /* tp_methods */ - NULL, /* tp_members */ - NULL, /* tp_getset */ - NULL, /* tp_base */ - NULL, /* tp_dict */ - NULL, /* tp_descr_get */ - NULL, /* tp_descr_set */ - 0, /* tp_dictoffset */ - - NULL, /* tp_init */ - NULL, /* tp_alloc */ - pyBuffer_new, /* tp_new */ - NULL, /* tp_free */ - NULL, /* tp_is_gc */ - - NULL, /* tp_bases */ - NULL, /* tp_mro */ - NULL, /* tp_cache */ - NULL, /* tp_subclasses */ - NULL, /* tp_weaklist */ - - NULL, /* tp_del */ - 0, /* tp_version_tag */ - NULL, /* tp_finalize */ -}; - -PyObject* Init_pyBuffer_Type() { - if (PyType_Ready(&pyBuffer_Type) < 0) - return NULL; - - Py_INCREF(&pyBuffer_Type); - return (PyObject*)&pyBuffer_Type; -} - -int pyBuffer_Check(PyObject* obj) { - if 
(obj->ob_type == &pyBuffer_Type - || PyType_IsSubtype(obj->ob_type, &pyBuffer_Type)) - return 1; - return 0; -} - -PyObject* pyBuffer_Steal(uint8_t* buffer, size_t size) { - pyBuffer* obj = PyObject_New(pyBuffer, &pyBuffer_Type); - obj->m_buffer = buffer; - obj->m_size = size; - return (PyObject*)obj; -} - -}; diff --git a/korlib/buffer.h b/korlib/buffer.h deleted file mode 100644 index 33b739b..0000000 --- a/korlib/buffer.h +++ /dev/null @@ -1,37 +0,0 @@ -/* This file is part of Korman. - * - * Korman is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * Korman is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with Korman. If not, see . - */ - -#ifndef _KORLIB_BUFFER_H -#define _KORLIB_BUFFER_H - -#include "korlib.h" - -extern "C" { - -typedef struct { - PyObject_HEAD - uint8_t* m_buffer; - size_t m_size; -} pyBuffer; - -extern PyTypeObject pyBuffer_Type; -PyObject* Init_pyBuffer_Type(); -int pyBuffer_Check(PyObject*); -PyObject* pyBuffer_Steal(uint8_t*, size_t); - -} - -#endif // _KORLIB_BUFFER_H diff --git a/korlib/module.cpp b/korlib/module.cpp index 8850715..4f6d257 100644 --- a/korlib/module.cpp +++ b/korlib/module.cpp @@ -14,7 +14,6 @@ * along with Korman. If not, see . */ -#include "buffer.h" #include "bumpmap.h" #include "sound.h" #include "texture.h" @@ -44,7 +43,6 @@ PyMODINIT_FUNC PyInit__korlib() { PyObject* module = PyModule_Create(&korlib_Module); // Module classes... 
- PyModule_AddObject(module, "Buffer", Init_pyBuffer_Type()); PyModule_AddObject(module, "GLTexture", Init_pyGLTexture_Type()); return module; diff --git a/korlib/texture.cpp b/korlib/texture.cpp index 0fe6615..049c94e 100644 --- a/korlib/texture.cpp +++ b/korlib/texture.cpp @@ -15,7 +15,6 @@ */ #include "texture.h" -#include "buffer.h" #ifdef _WIN32 # define WIN32_LEAN_AND_MEAN @@ -302,7 +301,7 @@ static PyObject* pyGLTexture_get_level_data(pyGLTexture* self, PyObject* args, P _LevelData data = _get_level_data(self, level, bgra, report); if (fast) - return pyBuffer_Steal(data.m_data, data.m_dataSize); + return PyBytes_FromStringAndSize((char*)data.m_data, data.m_dataSize); // OpenGL returns a flipped image, so we must reflip it. size_t row_stride = data.m_width * 4; @@ -331,39 +330,7 @@ static PyObject* pyGLTexture_get_level_data(pyGLTexture* self, PyObject* args, P data.m_data[i + 3] = (data.m_data[i + 0] + data.m_data[i + 1] + data.m_data[i + 2]) / 3; } - return pyBuffer_Steal(data.m_data, data.m_dataSize); -} - -static PyObject* pyGLTexture_store_in_mipmap(pyGLTexture* self, PyObject* args) { - pyMipmap* pymipmap; - PyObject* levels; - size_t compression; - if (!PyArg_ParseTuple(args, "OOn", &pymipmap, &levels, &compression) || !PySequence_Check(levels)) { - PyErr_SetString(PyExc_TypeError, "store_in_mipmap expects a plMipmap, sequence of Buffer and int"); - return NULL; - } - - // Since we actually have no way of knowing if that really is a pyMipmap... 
- plMipmap* mipmap = plMipmap::Convert(pymipmap->fThis, false); - if (!mipmap) { - PyErr_SetString(PyExc_TypeError, "store_in_mipmap expects a plMipmap, sequence of Buffer and int"); - return NULL; - } - - for (Py_ssize_t i = 0; i < PySequence_Size(levels); ++i) { - pyBuffer* item = (pyBuffer*)PySequence_GetItem(levels, i); - if (!pyBuffer_Check((PyObject*)item)) { - PyErr_SetString(PyExc_TypeError, "store_in_mipmap expects a plMipmap, sequence of Buffer and int"); - return NULL; - } - - if (compression == plBitmap::kDirectXCompression) - mipmap->CompressImage(i, item->m_buffer, item->m_size); - else - mipmap->setLevelData(i, item->m_buffer, item->m_size); - } - - Py_RETURN_NONE; + return PyBytes_FromStringAndSize((char*)data.m_data, data.m_dataSize); } static PyMethodDef pyGLTexture_Methods[] = { @@ -372,7 +339,6 @@ static PyMethodDef pyGLTexture_Methods[] = { { _pycs("generate_mipmap"), (PyCFunction)pyGLTexture_generate_mipmap, METH_NOARGS, NULL }, { _pycs("get_level_data"), (PyCFunction)pyGLTexture_get_level_data, METH_KEYWORDS | METH_VARARGS, NULL }, - { _pycs("store_in_mipmap"), (PyCFunction)pyGLTexture_store_in_mipmap, METH_VARARGS, NULL }, { NULL, NULL, 0, NULL } }; diff --git a/korman/korlib/texture.py b/korman/korlib/texture.py index 194add9..10d9388 100644 --- a/korman/korlib/texture.py +++ b/korman/korlib/texture.py @@ -181,8 +181,3 @@ class GLTexture: # texture in a single pixel?" # :) return max(numLevels - 2, 2) - - def store_in_mipmap(self, mipmap, data, compression): - func = mipmap.CompressImage if compression == plBitmap.kDirectXCompression else mipmap.setLevel - for i, level in enumerate(data): - func(i, level) From 72a4cf179eed11deffd7de98b3912b672221a5ef Mon Sep 17 00:00:00 2001 From: Adam Johnson Date: Mon, 3 Sep 2018 20:15:19 -0400 Subject: [PATCH 3/5] Add export-time texcache controls Now you can turn off the texture cache entirely or force a recache of the entire age... 
:) --- korman/exporter/convert.py | 4 ++++ korman/exporter/image.py | 9 +++++++-- korman/operators/op_export.py | 11 ++++++++++- korman/ui/ui_world.py | 1 + 4 files changed, 22 insertions(+), 3 deletions(-) diff --git a/korman/exporter/convert.py b/korman/exporter/convert.py index f0647fb..98f1cea 100644 --- a/korman/exporter/convert.py +++ b/korman/exporter/convert.py @@ -360,3 +360,7 @@ class Exporter: filepath = str(Path(filepath).with_suffix(".ktc")) age.texcache_path = filepath return filepath + + @property + def texcache_method(self): + return bpy.context.scene.world.plasma_age.texcache_method diff --git a/korman/exporter/image.py b/korman/exporter/image.py index fff107e..908b4d0 100644 --- a/korman/exporter/image.py +++ b/korman/exporter/image.py @@ -68,7 +68,7 @@ class ImageCache: self._stream_handles = 0 def add_texture(self, key, num_levels, export_size, compression, data): - if key.ephemeral: + if key.ephemeral or self._exporter().texcache_method == "skip": return image = _CachedImage() image.name = str(key) @@ -98,7 +98,7 @@ class ImageCache: self._read_stream.close() def get_from_texture(self, texture, compression): - if texture.ephemeral: + if self._exporter().texcache_method != "use" or texture.ephemeral: return None key = (str(texture), compression) @@ -123,6 +123,8 @@ class ImageCache: return cached_image def load(self): + if self._exporter().texcache_method == "skip": + return try: with self: self._read(self._read_stream) @@ -223,6 +225,9 @@ class ImageCache: return self._exporter().report def save(self): + if self._exporter().texcache_method == "skip": + return + # TODO: add a way to preserve unused images for a brief period so we don't toss # already cached images that are only removed from the age temporarily... 
self._compact() diff --git a/korman/operators/op_export.py b/korman/operators/op_export.py index d06def1..97f2e44 100644 --- a/korman/operators/op_export.py +++ b/korman/operators/op_export.py @@ -49,7 +49,15 @@ class ExportOperator(bpy.types.Operator): "description": "Forces the Blender System Console open during the export", "default": True}), - "texcache_path": (StringProperty, {"name": "Texture Cache"}), + "texcache_path": (StringProperty, {"name": "Texture Cache Path", + "description": "Texture Cache Filepath"}), + + "texcache_method": (EnumProperty, {"name": "Texture Cache", + "description": "Texture Cache Settings", + "items": [("skip", "Don't Use Texture Cache", "The texture cache is neither used nor updated."), + ("use", "Use Texture Cache", "Use (and update, if needed) cached textures."), + ("rebuild", "Rebuild Texture Cache", "Rebuilds the texture cache from scratch.")], + "default": "use"}), } # This wigs out and very bad things happen if it's not directly on the operator... 
@@ -68,6 +76,7 @@ class ExportOperator(bpy.types.Operator): # The crazy mess we're doing with props on the fly means we have to explicitly draw them :( layout.prop(self, "version") + layout.prop(age, "texcache_method", text="") layout.prop(age, "bake_lighting") row = layout.row() row.enabled = ConsoleToggler.is_platform_supported() diff --git a/korman/ui/ui_world.py b/korman/ui/ui_world.py index 333dbbc..6cac4e4 100644 --- a/korman/ui/ui_world.py +++ b/korman/ui/ui_world.py @@ -133,6 +133,7 @@ class PlasmaAgePanel(AgeButtonsPanel, bpy.types.Panel): col = split.column() col.label("Export Settings:") + col.prop(age, "texcache_method", text="") col.prop(age, "bake_lighting") cons_ui = col.column() cons_ui.enabled = ConsoleToggler.is_platform_supported() From 62f8b59ce2eed35953f3d62afec81165476fa5a7 Mon Sep 17 00:00:00 2001 From: Adam Johnson Date: Tue, 4 Sep 2018 19:37:06 -0400 Subject: [PATCH 4/5] Add per-image cache controls --- korman/exporter/image.py | 29 ++++++++++++++++++++++------- korman/properties/__init__.py | 2 ++ korman/properties/prop_image.py | 26 ++++++++++++++++++++++++++ korman/ui/__init__.py | 1 + korman/ui/ui_image.py | 25 +++++++++++++++++++++++++ 5 files changed, 76 insertions(+), 7 deletions(-) create mode 100644 korman/properties/prop_image.py create mode 100644 korman/ui/ui_image.py diff --git a/korman/exporter/image.py b/korman/exporter/image.py index 908b4d0..1762f7e 100644 --- a/korman/exporter/image.py +++ b/korman/exporter/image.py @@ -67,17 +67,26 @@ class ImageCache: self._read_stream = hsFileStream() self._stream_handles = 0 - def add_texture(self, key, num_levels, export_size, compression, data): - if key.ephemeral or self._exporter().texcache_method == "skip": + def add_texture(self, texture, num_levels, export_size, compression, data): + image = texture.image + image_name = str(texture) + key = (image_name, compression) + ex_method, im_method = self._exporter().texcache_method, image.plasma_image.texcache_method + method = 
set((ex_method, im_method)) + if texture.ephemeral or "skip" in method: + self._images.pop(key, None) return + elif im_method == "rebuild": + image.plasma_image.texcache_method = "use" + image = _CachedImage() - image.name = str(key) + image.name = image_name image.mip_levels = num_levels image.compression = compression - image.source_size = key.image.size + image.source_size = texture.image.size image.export_size = export_size image.image_data = data - self._images[(image.name, compression)] = image + self._images[key] = image def _compact(self): for key, image in self._images.copy().items(): @@ -98,7 +107,14 @@ class ImageCache: self._read_stream.close() def get_from_texture(self, texture, compression): - if self._exporter().texcache_method != "use" or texture.ephemeral: + bl_image = texture.image + + # If the texture is ephemeral (eg a lightmap) or has been marked "rebuild" or "skip" + # in the UI, we don't want anything from the cache. In the first two cases, we never + # want to cache that crap. In the latter case, we just want to signal a recache is needed. + ex_method, im_method = self._exporter().texcache_method, texture.image.plasma_image.texcache_method + method = set((ex_method, im_method)) + if method != {"use"} or texture.ephemeral: return None key = (str(texture), compression) @@ -108,7 +124,6 @@ class ImageCache: # ensure the texture key generally matches up with our copy of this image. # if not, a recache will likely be triggered implicitly. - bl_image = texture.image if tuple(bl_image.size) != cached_image.source_size: return None diff --git a/korman/properties/__init__.py b/korman/properties/__init__.py index aed2561..4da436b 100644 --- a/korman/properties/__init__.py +++ b/korman/properties/__init__.py @@ -16,6 +16,7 @@ import bpy from .prop_camera import * +from .prop_image import * from .prop_lamp import * from . 
import modifiers from .prop_object import * @@ -25,6 +26,7 @@ from .prop_world import * def register(): bpy.types.Camera.plasma_camera = bpy.props.PointerProperty(type=PlasmaCamera) + bpy.types.Image.plasma_image = bpy.props.PointerProperty(type=PlasmaImage) bpy.types.Lamp.plasma_lamp = bpy.props.PointerProperty(type=PlasmaLamp) bpy.types.Object.plasma_net = bpy.props.PointerProperty(type=PlasmaNet) bpy.types.Object.plasma_object = bpy.props.PointerProperty(type=PlasmaObject) diff --git a/korman/properties/prop_image.py b/korman/properties/prop_image.py new file mode 100644 index 0000000..14646d7 --- /dev/null +++ b/korman/properties/prop_image.py @@ -0,0 +1,26 @@ +# This file is part of Korman. +# +# Korman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Korman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Korman. If not, see . + +import bpy +from bpy.props import * + +class PlasmaImage(bpy.types.PropertyGroup): + texcache_method = EnumProperty(name="Texture Cache", + description="Texture Cache Settings", + items=[("skip", "Don't Cache Image", "This image is never cached."), + ("use", "Use Image Cache", "This image should be cached."), + ("rebuild", "Refresh Image Cache", "Forces this image to be recached on the next export.")], + default="use", + options=set()) diff --git a/korman/ui/__init__.py b/korman/ui/__init__.py index dde0d43..d1ef238 100644 --- a/korman/ui/__init__.py +++ b/korman/ui/__init__.py @@ -14,6 +14,7 @@ # along with Korman. If not, see . 
from .ui_camera import * +from .ui_image import * from .ui_lamp import * from .ui_list import * from .ui_menus import * diff --git a/korman/ui/ui_image.py b/korman/ui/ui_image.py new file mode 100644 index 0000000..d50a895 --- /dev/null +++ b/korman/ui/ui_image.py @@ -0,0 +1,25 @@ +# This file is part of Korman. +# +# Korman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Korman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Korman. If not, see . + +import bpy + +class PlasmaImageEditorHeader(bpy.types.Header): + bl_space_type = "IMAGE_EDITOR" + + def draw(self, context): + layout, image = self.layout, context.space_data.image + settings = image.plasma_image + + layout.prop(settings, "texcache_method", text="") From fdc6ae3f225e6ca2a5ef378212ddec3c1741c31f Mon Sep 17 00:00:00 2001 From: Adam Johnson Date: Tue, 4 Sep 2018 20:07:05 -0400 Subject: [PATCH 5/5] Recache on file updates Unfortunately, packed images don't appear to store any kind of modify time attribute. In the case of those, we're just idly hoping that we can find one on disk. 
--- korman/exporter/image.py | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/korman/exporter/image.py b/korman/exporter/image.py index 1762f7e..dc396f7 100644 --- a/korman/exporter/image.py +++ b/korman/exporter/image.py @@ -16,6 +16,7 @@ import enum from pathlib import Path from PyHSPlasma import * +import time import weakref _HEADER_MAGICK = b"KTH\x00" @@ -44,6 +45,7 @@ class _EntryBits(enum.IntEnum): compression = 3 source_size = 4 export_size = 5 + last_export = 6 class _CachedImage: @@ -55,6 +57,8 @@ self.source_size = None self.export_size = None self.compression = None + self.export_time = None + self.modify_time = None def __str__(self): return self.name @@ -127,6 +131,19 @@ if tuple(bl_image.size) != cached_image.source_size: return None + + # if the image is on the disk, we can check its modify time for changes + if cached_image.modify_time is None: + # if the image is packed, the filepath will be some garbage beginning with + # the string "//". There isn't much we can do with that, unless the user + # happens to have an unpacked copy lying around somewhere... + path = Path(bl_image.filepath_from_user()) + if path.is_file(): + cached_image.modify_time = path.stat().st_mtime + if cached_image.export_time and cached_image.export_time < cached_image.modify_time: + return None + else: + cached_image.modify_time = 0 + # ensure the data has been loaded from the cache if cached_image.image_data is None: try: @@ -231,6 +248,8 @@ image.source_size = (stream.readInt(), stream.readInt()) if flags[_EntryBits.export_size]: image.export_size = (stream.readInt(), stream.readInt()) + if flags[_EntryBits.last_export]: + image.export_time = stream.readDouble() # do we need to check for duplicate images?
self._images[(image.name, image.compression)] = image @@ -307,6 +326,7 @@ class ImageCache: flags[_EntryBits.compression] = True flags[_EntryBits.source_size] = True flags[_EntryBits.export_size] = True + flags[_EntryBits.last_export] = True stream.write(_ENTRY_MAGICK) flags.write(stream) @@ -318,3 +338,4 @@ class ImageCache: stream.writeInt(image.source_size[1]) stream.writeInt(image.export_size[0]) stream.writeInt(image.export_size[1]) + stream.writeDouble(time.time())