diff --git a/korlib/CMakeLists.txt b/korlib/CMakeLists.txt index a00a011..b0fc144 100644 --- a/korlib/CMakeLists.txt +++ b/korlib/CMakeLists.txt @@ -18,7 +18,6 @@ endif() # Da files set(korlib_HEADERS - buffer.h bumpmap.h korlib.h sound.h @@ -26,7 +25,6 @@ set(korlib_HEADERS ) set(korlib_SOURCES - buffer.cpp bumpmap.cpp module.cpp sound.cpp diff --git a/korlib/buffer.cpp b/korlib/buffer.cpp deleted file mode 100644 index 120f526..0000000 --- a/korlib/buffer.cpp +++ /dev/null @@ -1,111 +0,0 @@ -/* This file is part of Korman. - * - * Korman is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * Korman is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with Korman. If not, see . - */ - -#include "buffer.h" - -extern "C" { - -static void pyBuffer_dealloc(pyBuffer* self) { - delete[] self->m_buffer; - Py_TYPE(self)->tp_free((PyObject*)self); -} - -static PyObject* pyBuffer_new(PyTypeObject* type, PyObject* args, PyObject* kwds) { - PyErr_SetString(PyExc_RuntimeError, "Buffers cannot be created by mere mortals"); - return NULL; -} - -PyTypeObject pyBuffer_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_korlib.Buffer", /* tp_name */ - sizeof(pyBuffer), /* tp_basicsize */ - 0, /* tp_itemsize */ - - (destructor)pyBuffer_dealloc, /* tp_dealloc */ - NULL, /* tp_print */ - NULL, /* tp_getattr */ - NULL, /* tp_setattr */ - NULL, /* tp_compare */ - NULL, /* tp_repr */ - NULL, /* tp_as_number */ - NULL, /* tp_as_sequence */ - NULL, /* tp_as_mapping */ - NULL, /* tp_hash */ - NULL, /* tp_call */ - NULL, /* tp_str */ - NULL, /* tp_getattro */ - NULL, /* tp_setattro */ - NULL, /* tp_as_buffer */ - - Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ - "Buffer", /* tp_doc */ - - NULL, /* tp_traverse */ - NULL, /* tp_clear */ - NULL, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - NULL, /* tp_iter */ - NULL, /* tp_iternext */ - - NULL, /* tp_methods */ - NULL, /* tp_members */ - NULL, /* tp_getset */ - NULL, /* tp_base */ - NULL, /* tp_dict */ - NULL, /* tp_descr_get */ - NULL, /* tp_descr_set */ - 0, /* tp_dictoffset */ - - NULL, /* tp_init */ - NULL, /* tp_alloc */ - pyBuffer_new, /* tp_new */ - NULL, /* tp_free */ - NULL, /* tp_is_gc */ - - NULL, /* tp_bases */ - NULL, /* tp_mro */ - NULL, /* tp_cache */ - NULL, /* tp_subclasses */ - NULL, /* tp_weaklist */ - - NULL, /* tp_del */ - 0, /* tp_version_tag */ - NULL, /* tp_finalize */ -}; - -PyObject* Init_pyBuffer_Type() { - if (PyType_Ready(&pyBuffer_Type) < 0) - return NULL; - - Py_INCREF(&pyBuffer_Type); - return (PyObject*)&pyBuffer_Type; -} - -int pyBuffer_Check(PyObject* obj) { - if (obj->ob_type == &pyBuffer_Type - || PyType_IsSubtype(obj->ob_type, &pyBuffer_Type)) - return 1; - return 0; -} - -PyObject* pyBuffer_Steal(uint8_t* buffer, size_t size) { - pyBuffer* obj = PyObject_New(pyBuffer, &pyBuffer_Type); - obj->m_buffer = buffer; - obj->m_size = size; - return (PyObject*)obj; -} - -}; diff --git a/korlib/buffer.h b/korlib/buffer.h deleted file mode 100644 index 33b739b..0000000 --- a/korlib/buffer.h +++ /dev/null @@ -1,37 +0,0 @@ -/* This file is part of Korman. 
- * - * Korman is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as published by - * the Free Software Foundation, either version 3 of the License, or - * (at your option) any later version. - * - * Korman is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public License - * along with Korman. If not, see . - */ - -#ifndef _KORLIB_BUFFER_H -#define _KORLIB_BUFFER_H - -#include "korlib.h" - -extern "C" { - -typedef struct { - PyObject_HEAD - uint8_t* m_buffer; - size_t m_size; -} pyBuffer; - -extern PyTypeObject pyBuffer_Type; -PyObject* Init_pyBuffer_Type(); -int pyBuffer_Check(PyObject*); -PyObject* pyBuffer_Steal(uint8_t*, size_t); - -} - -#endif // _KORLIB_BUFFER_H diff --git a/korlib/module.cpp b/korlib/module.cpp index 8850715..4f6d257 100644 --- a/korlib/module.cpp +++ b/korlib/module.cpp @@ -14,7 +14,6 @@ * along with Korman. If not, see . */ -#include "buffer.h" #include "bumpmap.h" #include "sound.h" #include "texture.h" @@ -44,7 +43,6 @@ PyMODINIT_FUNC PyInit__korlib() { PyObject* module = PyModule_Create(&korlib_Module); // Module classes... - PyModule_AddObject(module, "Buffer", Init_pyBuffer_Type()); PyModule_AddObject(module, "GLTexture", Init_pyGLTexture_Type()); return module; diff --git a/korlib/texture.cpp b/korlib/texture.cpp index 0fe6615..049c94e 100644 --- a/korlib/texture.cpp +++ b/korlib/texture.cpp @@ -15,7 +15,6 @@ */ #include "texture.h" -#include "buffer.h" #ifdef _WIN32 # define WIN32_LEAN_AND_MEAN @@ -302,7 +301,7 @@ static PyObject* pyGLTexture_get_level_data(pyGLTexture* self, PyObject* args, P _LevelData data = _get_level_data(self, level, bgra, report); if (fast) - return pyBuffer_Steal(data.m_data, data.m_dataSize); + return PyBytes_FromStringAndSize((char*)data.m_data, data.m_dataSize); // OpenGL returns a flipped image, so we must reflip it. size_t row_stride = data.m_width * 4; @@ -331,39 +330,7 @@ static PyObject* pyGLTexture_get_level_data(pyGLTexture* self, PyObject* args, P data.m_data[i + 3] = (data.m_data[i + 0] + data.m_data[i + 1] + data.m_data[i + 2]) / 3; } - return pyBuffer_Steal(data.m_data, data.m_dataSize); -} - -static PyObject* pyGLTexture_store_in_mipmap(pyGLTexture* self, PyObject* args) { - pyMipmap* pymipmap; - PyObject* levels; - size_t compression; - if (!PyArg_ParseTuple(args, "OOn", &pymipmap, &levels, &compression) || !PySequence_Check(levels)) { - PyErr_SetString(PyExc_TypeError, "store_in_mipmap expects a plMipmap, sequence of Buffer and int"); - return NULL; - } - - // Since we actually have no way of knowing if that really is a pyMipmap... 
- plMipmap* mipmap = plMipmap::Convert(pymipmap->fThis, false); - if (!mipmap) { - PyErr_SetString(PyExc_TypeError, "store_in_mipmap expects a plMipmap, sequence of Buffer and int"); - return NULL; - } - - for (Py_ssize_t i = 0; i < PySequence_Size(levels); ++i) { - pyBuffer* item = (pyBuffer*)PySequence_GetItem(levels, i); - if (!pyBuffer_Check((PyObject*)item)) { - PyErr_SetString(PyExc_TypeError, "store_in_mipmap expects a plMipmap, sequence of Buffer and int"); - return NULL; - } - - if (compression == plBitmap::kDirectXCompression) - mipmap->CompressImage(i, item->m_buffer, item->m_size); - else - mipmap->setLevelData(i, item->m_buffer, item->m_size); - } - - Py_RETURN_NONE; + return PyBytes_FromStringAndSize((char*)data.m_data, data.m_dataSize); } static PyMethodDef pyGLTexture_Methods[] = { @@ -372,7 +339,6 @@ static PyMethodDef pyGLTexture_Methods[] = { { _pycs("generate_mipmap"), (PyCFunction)pyGLTexture_generate_mipmap, METH_NOARGS, NULL }, { _pycs("get_level_data"), (PyCFunction)pyGLTexture_get_level_data, METH_KEYWORDS | METH_VARARGS, NULL }, - { _pycs("store_in_mipmap"), (PyCFunction)pyGLTexture_store_in_mipmap, METH_VARARGS, NULL }, { NULL, NULL, 0, NULL } }; diff --git a/korman/exporter/convert.py b/korman/exporter/convert.py index 558d9f6..98f1cea 100644 --- a/korman/exporter/convert.py +++ b/korman/exporter/convert.py @@ -23,6 +23,7 @@ from . import animation from . import camera from . import explosions from . import etlight +from . import image from . import logger from . import manager from . import mesh @@ -54,6 +55,7 @@ class Exporter: self.animation = animation.AnimationConverter(self) self.sumfile = sumfile.SumFile() self.camera = camera.CameraConverter(self) + self.image = image.ImageCache(self) # Step 0.8: Init the progress mgr self.mesh.add_progress_presteps(self.report) @@ -66,6 +68,7 @@ class Exporter: self.report.progress_add_step("Finalizing Plasma Logic") self.report.progress_add_step("Exporting Textures") self.report.progress_add_step("Composing Geometry") + self.report.progress_add_step("Saving Age Files") self.report.progress_start("EXPORTING AGE") # Step 0.9: Apply modifiers to all meshes temporarily. @@ -101,7 +104,7 @@ class Exporter: self.mesh.finalize() # Step 5: FINALLY. Let's write the PRPs and crap. - self.mgr.save_age(Path(self._op.filepath)) + self._save_age() # Step 5.1: Save out the export report. # If the export fails and this doesn't save, we have bigger problems than @@ -340,3 +343,24 @@ class Exporter: if proc is not None: proc(self, bl_obj, sceneobject) inc_progress() + + def _save_age(self): + self.report.progress_advance() + self.mgr.save_age(Path(self._op.filepath)) + self.image.save() + + @property + def texcache_path(self): + age = bpy.context.scene.world.plasma_age + filepath = age.texcache_path + if not filepath or not Path(filepath).is_file(): + filepath = bpy.context.blend_data.filepath + if not filepath: + filepath = self.filepath + filepath = str(Path(filepath).with_suffix(".ktc")) + age.texcache_path = filepath + return filepath + + @property + def texcache_method(self): + return bpy.context.scene.world.plasma_age.texcache_method diff --git a/korman/exporter/image.py b/korman/exporter/image.py new file mode 100644 index 0000000..dc396f7 --- /dev/null +++ b/korman/exporter/image.py @@ -0,0 +1,341 @@ +# This file is part of Korman. 
+# +# Korman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Korman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Korman. If not, see . + +import enum +from pathlib import Path +from PyHSPlasma import * +import time +import weakref + +_HEADER_MAGICK = b"KTH\x00" +_INDEX_MAGICK = b"KTI\x00" +_DATA_MAGICK = b"KTC\x00" +_ENTRY_MAGICK = b"KTE\x00" +_IMAGE_MAGICK = b"KTT\x00" +_MIP_MAGICK = b"KTM\x00" + +@enum.unique +class _HeaderBits(enum.IntEnum): + last_export = 0 + index_pos = 1 + + +@enum.unique +class _IndexBits(enum.IntEnum): + image_count = 0 + + +@enum.unique +class _EntryBits(enum.IntEnum): + image_name = 0 + mip_levels = 1 + image_pos = 2 + compression = 3 + source_size = 4 + export_size = 5 + last_export = 6 + + +class _CachedImage: + def __init__(self): + self.name = None + self.mip_levels = 1 + self.data_pos = None + self.image_data = None + self.source_size = None + self.export_size = None + self.compression = None + self.export_time = None + self.modify_time = None + + def __str__(self): + return self.name + + +class ImageCache: + def __init__(self, exporter): + self._exporter = weakref.ref(exporter) + self._images = {} + self._read_stream = hsFileStream() + self._stream_handles = 0 + + def add_texture(self, texture, num_levels, export_size, compression, data): + image = texture.image + image_name = str(texture) + key = (image_name, compression) + ex_method, im_method = self._exporter().texcache_method, image.plasma_image.texcache_method + method = set((ex_method, im_method)) + if texture.ephemeral or "skip" in method: + self._images.pop(key, None) + return + elif im_method == "rebuild": + image.plasma_image.texcache_method = "use" + + image = _CachedImage() + image.name = image_name + image.mip_levels = num_levels + image.compression = compression + image.source_size = texture.image.size + image.export_size = export_size + image.image_data = data + self._images[key] = image + + def _compact(self): + for key, image in self._images.copy().items(): + if image.image_data is None: + self._images.pop(key) + + def __enter__(self): + if self._stream_handles == 0: + path = self._exporter().texcache_path + if Path(path).is_file(): + self._read_stream.open(path, fmRead) + self._stream_handles += 1 + return self + + def __exit__(self, type, value, tb): + self._stream_handles -= 1 + if self._stream_handles == 0: + self._read_stream.close() + + def get_from_texture(self, texture, compression): + bl_image = texture.image + + # If the texture is ephemeral (eg a lightmap) or has been marked "rebuild" or "skip" + # in the UI, we don't want anything from the cache. In the first two cases, we never + # want to cache that crap. In the latter case, we just want to signal a recache is needed. 
ex_method, im_method = self._exporter().texcache_method, texture.image.plasma_image.texcache_method + method = set((ex_method, im_method)) + if method != {"use"} or texture.ephemeral: + return None + + key = (str(texture), compression) + cached_image = self._images.get(key) + if cached_image is None: + return None + + # ensure the texture key generally matches up with our copy of this image. + # if not, a recache will likely be triggered implicitly. + if tuple(bl_image.size) != cached_image.source_size: + return None + + # if the image is on the disk, we can check its modify time for changes + if cached_image.modify_time is None: + # if the image is packed, the filepath will be some garbage beginning with + # the string "//". There isn't much we can do with that, unless the user + # happens to have an unpacked copy lying around somewhere... + path = Path(bl_image.filepath_from_user()) + if path.is_file(): + cached_image.modify_time = path.stat().st_mtime + if cached_image.export_time and cached_image.export_time < cached_image.modify_time: + return None + else: + cached_image.modify_time = 0 + + # ensure the data has been loaded from the cache + if cached_image.image_data is None: + try: + cached_image.image_data = tuple(self._read_image_data(cached_image, self._read_stream)) + except AssertionError: + self._report.warn("Cached copy of '{}' is corrupt and will be discarded", cached_image.name, indent=2) + self._images.pop(key) + return None + return cached_image + + def load(self): + if self._exporter().texcache_method == "skip": + return + try: + with self: + self._read(self._read_stream) + except AssertionError: + self._report.warn("Texture Cache is corrupt and will be regenerated") + self._images.clear() + + def _read(self, stream): + if stream.size == 0: + return + stream.seek(0) + assert stream.read(4) == _HEADER_MAGICK + + # if we use a bit vector to define our header structure, we can add + # new fields without having to up the file version, trashing old + # texture cache files... :) + flags = hsBitVector() + flags.read(stream) + + # ALWAYS ADD NEW FIELDS TO THE END OF THIS SECTION!!!!!!! + if flags[_HeaderBits.last_export]: + self.last_export = stream.readDouble() + if flags[_HeaderBits.index_pos]: + index_pos = stream.readInt() + self._read_index(index_pos, stream) + + def _read_image_data(self, image, stream): + if image.data_pos is None: + return None + + assert stream.size > 0 + stream.seek(image.data_pos) + assert stream.read(4) == _IMAGE_MAGICK + + # unused currently + image_flags = hsBitVector() + image_flags.read(stream) + + # given this is a generator, someone else might change our stream position + # between iterations, so we'd best bookkeep the position + pos = stream.pos + + for i in range(image.mip_levels): + if stream.pos != pos: + stream.seek(pos) + assert stream.read(4) == _MIP_MAGICK + + # this should only ever be image data... + # store your flags somewhere else! + size = stream.readInt() + data = stream.read(size) + pos = stream.pos + yield data + + def _read_index(self, index_pos, stream): + stream.seek(index_pos) + assert stream.read(4) == _INDEX_MAGICK + + # See above, can change the index format easily... + flags = hsBitVector() + flags.read(stream) + + # ALWAYS ADD NEW FIELDS TO THE END OF THIS SECTION!!!!!!!
+ image_count = stream.readInt() if flags[_IndexBits.image_count] else 0 + + # Here begins the image map + assert stream.read(4) == _DATA_MAGICK + for i in range(image_count): + self._read_index_entry(stream) + + def _read_index_entry(self, stream): + assert stream.read(4) == _ENTRY_MAGICK + image = _CachedImage() + + # See above, can change the entry format easily... + flags = hsBitVector() + flags.read(stream) + + # ALWAYS ADD NEW FIELDS TO THE END OF THIS SECTION!!!!!!! + if flags[_EntryBits.image_name]: + image.name = stream.readSafeWStr() + if flags[_EntryBits.mip_levels]: + image.mip_levels = stream.readByte() + if flags[_EntryBits.image_pos]: + image.data_pos = stream.readInt() + if flags[_EntryBits.compression]: + image.compression = stream.readByte() + if flags[_EntryBits.source_size]: + image.source_size = (stream.readInt(), stream.readInt()) + if flags[_EntryBits.export_size]: + image.export_size = (stream.readInt(), stream.readInt()) + if flags[_EntryBits.last_export]: + image.export_time = stream.readDouble() + + # do we need to check for duplicate images? + self._images[(image.name, image.compression)] = image + + @property + def _report(self): + return self._exporter().report + + def save(self): + if self._exporter().texcache_method == "skip": + return + + # TODO: add a way to preserve unused images for a brief period so we don't toss + # already cached images that are only removed from the age temporarily... + self._compact() + + # Assume all read operations are done (don't be within' my cache while you savin') + assert self._stream_handles == 0 + + with hsFileStream().open(self._exporter().texcache_path, fmWrite) as stream: + self._write(stream) + + def _write(self, stream): + flags = hsBitVector() + flags[_HeaderBits.index_pos] = True + + stream.seek(0) + stream.write(_HEADER_MAGICK) + flags.write(stream) + header_index_pos = stream.pos + stream.writeInt(-1) + + for image in self._images.values(): + self._write_image_data(image, stream) + + # fix the index position + index_pos = stream.pos + self._write_index(stream) + stream.seek(header_index_pos) + stream.writeInt(index_pos) + + def _write_image_data(self, image, stream): + # unused currently + flags = hsBitVector() + + image.data_pos = stream.pos + stream.write(_IMAGE_MAGICK) + flags.write(stream) + + for i in image.image_data: + stream.write(_MIP_MAGICK) + stream.writeInt(len(i)) + stream.write(i) + + def _write_index(self, stream): + flags = hsBitVector() + flags[_IndexBits.image_count] = True + + pos = stream.pos + stream.write(_INDEX_MAGICK) + flags.write(stream) + stream.writeInt(len(self._images)) + + stream.write(_DATA_MAGICK) + for image in self._images.values(): + self._write_index_entry(image, stream) + return pos + + def _write_index_entry(self, image, stream): + flags = hsBitVector() + flags[_EntryBits.image_name] = True + flags[_EntryBits.mip_levels] = True + flags[_EntryBits.image_pos] = True + flags[_EntryBits.compression] = True + flags[_EntryBits.source_size] = True + flags[_EntryBits.export_size] = True + flags[_EntryBits.last_export] = True + + stream.write(_ENTRY_MAGICK) + flags.write(stream) + stream.writeSafeWStr(str(image)) + stream.writeByte(image.mip_levels) + stream.writeInt(image.data_pos) + stream.writeByte(image.compression) + stream.writeInt(image.source_size[0]) + stream.writeInt(image.source_size[1]) + stream.writeInt(image.export_size[0]) + stream.writeInt(image.export_size[1]) + stream.writeDouble(time.time()) diff --git a/korman/exporter/material.py b/korman/exporter/material.py index 
2bf9927..96f31ef 100644 --- a/korman/exporter/material.py +++ b/korman/exporter/material.py @@ -82,6 +82,7 @@ class _Texture: else: self.auto_ext = "hsm" self.extension = kwargs.get("extension", self.auto_ext) + self.ephemeral = kwargs.get("ephemeral", False) self.image = image def __eq__(self, other): @@ -653,6 +654,7 @@ class MaterialConverter: to use the image datablock extension, set this to None - indent: (optional) indentation level for log messages default: 2 + - ephemeral: (optional) never cache this image """ owner = kwargs.pop("owner", None) indent = kwargs.pop("indent", 2) @@ -672,91 +674,117 @@ class MaterialConverter: inc_progress = self._report.progress_increment mgr = self._mgr - for key, owners in self._pending.items(): - name = str(key) - self._report.msg("\n[Mipmap '{}']", name) - - image = key.image - oWidth, oHeight = image.size - if oWidth == 0 and oHeight == 0: - raise ExportError("Image '{}' could not be loaded.".format(image.name)) - - eWidth = helpers.ensure_power_of_two(oWidth) - eHeight = helpers.ensure_power_of_two(oHeight) - if (eWidth != oWidth) or (eHeight != oHeight): - self._report.msg("Image is not a POT ({}x{}) resizing to {}x{}", - oWidth, oHeight, eWidth, eHeight, indent=1) - self._resize_image(image, eWidth, eHeight) - - # Now we try to use the pile of hints we were given to figure out what format to use - allowed_formats = key.allowed_formats - if key.mipmap: - compression = plBitmap.kDirectXCompression - elif "PNG" in allowed_formats and self._mgr.getVer() == pvMoul: - compression = plBitmap.kPNGCompression - elif "DDS" in allowed_formats: - compression = plBitmap.kDirectXCompression - elif "JPG" in allowed_formats: - compression = plBitmap.kJPEGCompression - elif "BMP" in allowed_formats: - compression = plBitmap.kUncompressed - else: - raise RuntimeError(allowed_formats) - dxt = plBitmap.kDXT5 if key.use_alpha or key.calc_alpha else plBitmap.kDXT1 - - # Grab the image data from OpenGL and stuff it into the plBitmap - helper = GLTexture(key) - with helper as glimage: - if compression == plBitmap.kDirectXCompression: - numLevels = glimage.num_levels - self._report.msg("Generating mip levels", indent=1) - glimage.generate_mipmap() + # This with statement causes the texture cache to hold open a + # read stream for the cache file, preventing spurious open-close + # spin washing during this tight loop. Note that the cache still + # has to actually be loaded ^_^ + with self._texcache as texcache: + texcache.load() + + for key, owners in self._pending.items(): + name = str(key) + self._report.msg("\n[Mipmap '{}']", name) + + image = key.image + oWidth, oHeight = image.size + if oWidth == 0 and oHeight == 0: + raise ExportError("Image '{}' could not be loaded.".format(image.name)) + + # Now we try to use the pile of hints we were given to figure out what format to use + allowed_formats = key.allowed_formats + if key.mipmap: + compression = plBitmap.kDirectXCompression + elif "PNG" in allowed_formats and self._mgr.getVer() == pvMoul: + compression = plBitmap.kPNGCompression + elif "DDS" in allowed_formats: + compression = plBitmap.kDirectXCompression + elif "JPG" in allowed_formats: + compression = plBitmap.kJPEGCompression + elif "BMP" in allowed_formats: + compression = plBitmap.kUncompressed else: - numLevels = 1 - self._report.msg("Stuffing image data", indent=1) - - # Non-DXT images are BGRA in Plasma - fmt = compression != plBitmap.kDirectXCompression - - # Hold the uncompressed level data for now. 
We may have to make multiple copies of - # this mipmap for per-page textures :( - data = [] - for i in range(numLevels): - data.append(glimage.get_level_data(i, key.calc_alpha, fmt, report=self._report)) - - # Be a good citizen and reset the Blender Image to pre-futzing state - image.reload() - - # Now we poke our new bitmap into the pending layers. Note that we have to do some funny - # business to account for per-page textures - pages = {} - - self._report.msg("Adding to...", indent=1) - for owner_key in owners: - owner = owner_key.object - self._report.msg("[{} '{}']", owner.ClassName()[2:], owner_key.name, indent=2) - page = mgr.get_textures_page(owner_key) # Layer's page or Textures.prp - - # If we haven't created this plMipmap in the page (either layer's page or Textures.prp), - # then we need to do that and stuff the level data. This is a little tedious, but we - # need to be careful to manage our resources correctly - if page not in pages: - mipmap = plMipmap(name=name, width=eWidth, height=eHeight, numLevels=numLevels, - compType=compression, format=plBitmap.kRGB8888, dxtLevel=dxt) - helper.store_in_mipmap(mipmap, data, compression) - mgr.AddObject(page, mipmap) - pages[page] = mipmap + raise RuntimeError(allowed_formats) + dxt = plBitmap.kDXT5 if key.use_alpha or key.calc_alpha else plBitmap.kDXT1 + + # Mayhaps we have a cached version of this that has already been exported + cached_image = texcache.get_from_texture(key, compression) + + if cached_image is None: + eWidth = helpers.ensure_power_of_two(oWidth) + eHeight = helpers.ensure_power_of_two(oHeight) + if (eWidth != oWidth) or (eHeight != oHeight): + self._report.msg("Image is not a POT ({}x{}) resizing to {}x{}", + oWidth, oHeight, eWidth, eHeight, indent=1) + self._resize_image(image, eWidth, eHeight) + + # Grab the image data from OpenGL and stuff it into the plBitmap + helper = GLTexture(key) + with helper as glimage: + if compression == plBitmap.kDirectXCompression: + numLevels = glimage.num_levels + self._report.msg("Generating mip levels", indent=1) + glimage.generate_mipmap() + else: + numLevels = 1 + self._report.msg("Compressing single level", indent=1) + + # Non-DXT images are BGRA in Plasma + fmt = compression != plBitmap.kDirectXCompression + + # Hold the uncompressed level data for now. We may have to make multiple copies of + # this mipmap for per-page textures :( + data = [] + for i in range(numLevels): + data.append(glimage.get_level_data(i, key.calc_alpha, fmt, report=self._report)) + + # Be a good citizen and reset the Blender Image to pre-futzing state + image.reload() + + # If this is a DXT-compressed mipmap, we need to use a temporary mipmap + # to do the compression. We'll then steal the data from it. + if compression == plBitmap.kDirectXCompression: + mipmap = plMipmap(name=name, width=eWidth, height=eHeight, numLevels=numLevels, + compType=compression, format=plBitmap.kRGB8888, dxtLevel=dxt) + for i in range(numLevels): + mipmap.CompressImage(i, data[i]) + data[i] = mipmap.getLevel(i) + texcache.add_texture(key, numLevels, (eWidth, eHeight), compression, data) else: - mipmap = pages[page] + eWidth, eHeight = cached_image.export_size + data = cached_image.image_data + numLevels = cached_image.mip_levels + + # Now we poke our new bitmap into the pending layers. 
Note that we have to do some funny + # business to account for per-page textures + pages = {} + + self._report.msg("Adding to...", indent=1) + for owner_key in owners: + owner = owner_key.object + self._report.msg("[{} '{}']", owner.ClassName()[2:], owner_key.name, indent=2) + page = mgr.get_textures_page(owner_key) # Layer's page or Textures.prp + + # If we haven't created this plMipmap in the page (either layer's page or Textures.prp), + # then we need to do that and stuff the level data. This is a little tedious, but we + # need to be careful to manage our resources correctly + if page not in pages: + mipmap = plMipmap(name=name, width=eWidth, height=eHeight, numLevels=numLevels, + compType=compression, format=plBitmap.kRGB8888, dxtLevel=dxt) + for i, buf in enumerate(data): + mipmap.setLevel(i, buf) + mgr.AddObject(page, mipmap) + pages[page] = mipmap + else: + mipmap = pages[page] - if isinstance(owner, plLayerInterface): - owner.texture = mipmap.key - elif isinstance(owner, plImageLibMod): - owner.addImage(mipmap.key) - else: - raise RuntimeError(owner.ClassName()) + if isinstance(owner, plLayerInterface): + owner.texture = mipmap.key + elif isinstance(owner, plImageLibMod): + owner.addImage(mipmap.key) + else: + raise RuntimeError(owner.ClassName()) - inc_progress() + inc_progress() def get_materials(self, bo): return self._obj2mat.get(bo, []) @@ -843,3 +871,7 @@ class MaterialConverter: self._alphatest[image] = result return result + + @property + def _texcache(self): + return self._exporter().image diff --git a/korman/korlib/texture.py b/korman/korlib/texture.py index 194add9..10d9388 100644 --- a/korman/korlib/texture.py +++ b/korman/korlib/texture.py @@ -181,8 +181,3 @@ class GLTexture: # texture in a single pixel?" # :) return max(numLevels - 2, 2) - - def store_in_mipmap(self, mipmap, data, compression): - func = mipmap.CompressImage if compression == plBitmap.kDirectXCompression else mipmap.setLevel - for i, level in enumerate(data): - func(i, level) diff --git a/korman/operators/op_export.py b/korman/operators/op_export.py index 27b19e1..97f2e44 100644 --- a/korman/operators/op_export.py +++ b/korman/operators/op_export.py @@ -48,6 +48,16 @@ class ExportOperator(bpy.types.Operator): "show_console": (BoolProperty, {"name": "Display Log Console", "description": "Forces the Blender System Console open during the export", "default": True}), + + "texcache_path": (StringProperty, {"name": "Texture Cache Path", + "description": "Texture Cache Filepath"}), + + "texcache_method": (EnumProperty, {"name": "Texture Cache", + "description": "Texture Cache Settings", + "items": [("skip", "Don't Use Texture Cache", "The texture cache is neither used nor updated."), + ("use", "Use Texture Cache", "Use (and update, if needed) cached textures."), + ("rebuild", "Rebuild Texture Cache", "Rebuilds the texture cache from scratch.")], + "default": "use"}), } # This wigs out and very bad things happen if it's not directly on the operator... 
@@ -66,6 +76,7 @@ class ExportOperator(bpy.types.Operator): # The crazy mess we're doing with props on the fly means we have to explicitly draw them :( layout.prop(self, "version") + layout.prop(age, "texcache_method", text="") layout.prop(age, "bake_lighting") row = layout.row() row.enabled = ConsoleToggler.is_platform_supported() diff --git a/korman/properties/__init__.py b/korman/properties/__init__.py index aed2561..4da436b 100644 --- a/korman/properties/__init__.py +++ b/korman/properties/__init__.py @@ -16,6 +16,7 @@ import bpy from .prop_camera import * +from .prop_image import * from .prop_lamp import * from . import modifiers from .prop_object import * @@ -25,6 +26,7 @@ from .prop_world import * def register(): bpy.types.Camera.plasma_camera = bpy.props.PointerProperty(type=PlasmaCamera) + bpy.types.Image.plasma_image = bpy.props.PointerProperty(type=PlasmaImage) bpy.types.Lamp.plasma_lamp = bpy.props.PointerProperty(type=PlasmaLamp) bpy.types.Object.plasma_net = bpy.props.PointerProperty(type=PlasmaNet) bpy.types.Object.plasma_object = bpy.props.PointerProperty(type=PlasmaObject) diff --git a/korman/properties/modifiers/render.py b/korman/properties/modifiers/render.py index 5fd2786..00cf8a0 100644 --- a/korman/properties/modifiers/render.py +++ b/korman/properties/modifiers/render.py @@ -215,6 +215,7 @@ class PlasmaLightMapGen(idprops.IDPropMixin, PlasmaModifierProperties): # Mmm... cheating mat_mgr.export_prepared_image(owner=layer, image=lightmap_im, allowed_formats={"PNG", "DDS"}, + ephemeral=True, indent=2) @classmethod diff --git a/korman/properties/prop_image.py b/korman/properties/prop_image.py new file mode 100644 index 0000000..14646d7 --- /dev/null +++ b/korman/properties/prop_image.py @@ -0,0 +1,26 @@ +# This file is part of Korman. +# +# Korman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Korman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Korman. If not, see . + +import bpy +from bpy.props import * + +class PlasmaImage(bpy.types.PropertyGroup): + texcache_method = EnumProperty(name="Texture Cache", + description="Texture Cache Settings", + items=[("skip", "Don't Cache Image", "This image is never cached."), + ("use", "Use Image Cache", "This image should be cached."), + ("rebuild", "Refresh Image Cache", "Forces this image to be recached on the next export.")], + default="use", + options=set()) diff --git a/korman/ui/__init__.py b/korman/ui/__init__.py index dde0d43..d1ef238 100644 --- a/korman/ui/__init__.py +++ b/korman/ui/__init__.py @@ -14,6 +14,7 @@ # along with Korman. If not, see . from .ui_camera import * +from .ui_image import * from .ui_lamp import * from .ui_list import * from .ui_menus import * diff --git a/korman/ui/ui_image.py b/korman/ui/ui_image.py new file mode 100644 index 0000000..d50a895 --- /dev/null +++ b/korman/ui/ui_image.py @@ -0,0 +1,25 @@ +# This file is part of Korman. 
+# +# Korman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Korman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Korman. If not, see . + +import bpy + +class PlasmaImageEditorHeader(bpy.types.Header): + bl_space_type = "IMAGE_EDITOR" + + def draw(self, context): + layout, image = self.layout, context.space_data.image + settings = image.plasma_image + + layout.prop(settings, "texcache_method", text="") diff --git a/korman/ui/ui_world.py b/korman/ui/ui_world.py index 333dbbc..6cac4e4 100644 --- a/korman/ui/ui_world.py +++ b/korman/ui/ui_world.py @@ -133,6 +133,7 @@ class PlasmaAgePanel(AgeButtonsPanel, bpy.types.Panel): col = split.column() col.label("Export Settings:") + col.prop(age, "texcache_method", text="") col.prop(age, "bake_lighting") cons_ui = col.column() cons_ui.enabled = ConsoleToggler.is_platform_supported()
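With the korlib Buffer type and GLTexture.store_in_mipmap removed, get_level_data() hands back an ordinary Python bytes object and DXT compression is done through a throwaway plMipmap whose compressed levels are read back with getLevel(). A condensed sketch of that flow, assuming the GLTexture wrapper from korman.korlib and the PyHSPlasma plMipmap/plBitmap calls used in the material.py hunk above; the function and argument names here are stand-ins, not part of the patch.

from PyHSPlasma import plMipmap, plBitmap

def build_level_data(glimage, num_levels, calc_alpha, compression, name, width, height, dxt):
    # get_level_data() now returns plain bytes, so levels can be cached, hashed,
    # or written to the texture cache without any korlib-specific wrapper type.
    bgra = compression != plBitmap.kDirectXCompression   # non-DXT images are BGRA in Plasma
    data = [glimage.get_level_data(i, calc_alpha, bgra) for i in range(num_levels)]

    if compression == plBitmap.kDirectXCompression:
        # Compression happens in Python via a scratch plMipmap; the compressed
        # levels are pulled back out with getLevel(), so they are bytes as well.
        scratch = plMipmap(name=name, width=width, height=height, numLevels=num_levels,
                           compType=compression, format=plBitmap.kRGB8888, dxtLevel=dxt)
        for i, buf in enumerate(data):
            scratch.CompressImage(i, buf)
            data[i] = scratch.getLevel(i)
    return data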
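Exporter.texcache_path decides where the .ktc file lives: an explicit path stored on the age wins if it still exists, otherwise the .blend's own path (or, for a never-saved file, the export target) is reused with a .ktc suffix and written back to the age property. A standalone restatement of that fallback, with the three candidate paths passed in as plain strings instead of being read from bpy:

from pathlib import Path

def resolve_texcache_path(age_path, blend_path, export_path):
    # Prefer a path the user already chose, but only if it still exists on disk.
    if age_path and Path(age_path).is_file():
        return age_path
    # Otherwise derive one from the .blend (or the export target for unsaved files).
    base = blend_path or export_path
    return str(Path(base).with_suffix(".ktc"))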
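Every block in the cache file (KTH header, KTI index, KTE entries, KTT image blocks, KTM mip levels) starts with a magic tag followed by an hsBitVector that says which optional fields follow, which is what lets fields be appended later without a version bump. A stripped-down reader for just the header, using PyHSPlasma's hsBitVector as in _read() above; the numeric bit positions mirror _HeaderBits.

from PyHSPlasma import hsBitVector

def read_cache_header(stream):
    assert stream.read(4) == b"KTH\x00"
    flags = hsBitVector()
    flags.read(stream)
    # Only fields whose bit is set are present, and new fields are always
    # appended after the existing ones, so old cache files stay readable.
    last_export = stream.readDouble() if flags[0] else None   # _HeaderBits.last_export
    index_pos = stream.readInt() if flags[1] else None        # _HeaderBits.index_pos
    return last_export, index_pos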
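The writer cannot know where the index will land until all image blocks have been written, so _write() reserves a 4-byte slot right after the header, writes the data, then seeks back and patches in the real offset. The same fix-up pattern in isolation; the index body and per-image flags are elided, and image_blobs stands in for the cached level data.

from PyHSPlasma import hsBitVector

def write_cache(stream, image_blobs):
    stream.write(b"KTH\x00")
    flags = hsBitVector()
    flags[1] = True                    # _HeaderBits.index_pos
    flags.write(stream)
    header_index_pos = stream.pos      # remember where the placeholder sits
    stream.writeInt(-1)                # placeholder, patched below

    data_positions = []
    for blob in image_blobs:           # image data comes first...
        data_positions.append(stream.pos)
        stream.write(b"KTT\x00")
        hsBitVector().write(stream)    # per-image flags (unused for now)
        stream.write(b"KTM\x00")
        stream.writeInt(len(blob))
        stream.write(blob)

    index_pos = stream.pos             # ...then the index, whose offset is now known
    stream.write(b"KTI\x00")
    # (index entries elided)

    stream.seek(header_index_pos)      # go back and fix the header
    stream.writeInt(index_pos)
    return data_positions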
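With the cache in place, export_prepared_image() only touches OpenGL on a miss: get_from_texture() either returns a _CachedImage whose export size, mip count, and level bytes are reused directly, or None, in which case the levels are rebuilt and handed to add_texture() for next time. The control flow, condensed; texcache, key, and build_levels are stand-ins for the ImageCache, the _Texture key, and the GL rebuild path shown above.

def levels_for_export(texcache, key, compression, build_levels):
    cached = texcache.get_from_texture(key, compression)
    if cached is not None:
        # Cache hit: no OpenGL work, no image resize, no recompression.
        width, height = cached.export_size
        return width, height, cached.mip_levels, cached.image_data

    # Cache miss: rebuild from the Blender image, then remember the result.
    width, height, num_levels, data = build_levels()
    texcache.add_texture(key, num_levels, (width, height), compression, data)
    return width, height, num_levels, data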
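The per-age setting (world.plasma_age.texcache_method) and the per-image setting (image.plasma_image.texcache_method) are combined as a set in both add_texture() and get_from_texture(); together with the ephemeral flag this yields three behaviours. A restatement of that policy using the same "skip"/"use"/"rebuild" identifiers registered in op_export.py and prop_image.py; the return labels are illustrative only.

def cache_policy(age_method, image_method, ephemeral=False):
    method = {age_method, image_method}
    if ephemeral or "skip" in method:
        return "never-cache"    # drop any stale entry and don't store a new one
    if "rebuild" in method:
        return "recache"        # ignore the cached copy, export fresh, cache the result
    return "use"                # both sides agree the cached copy may be reused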