
Merge pull request #117 from Hoikas/texcache

Texture Cache
Adam Johnson authored 6 years ago · committed by GitHub
commit 2748bc2ab3
  1. korlib/CMakeLists.txt (2)
  2. korlib/buffer.cpp (111)
  3. korlib/buffer.h (37)
  4. korlib/module.cpp (2)
  5. korlib/texture.cpp (38)
  6. korman/exporter/convert.py (26)
  7. korman/exporter/image.py (341)
  8. korman/exporter/material.py (194)
  9. korman/korlib/texture.py (5)
  10. korman/operators/op_export.py (11)
  11. korman/properties/__init__.py (2)
  12. korman/properties/modifiers/render.py (1)
  13. korman/properties/prop_image.py (26)
  14. korman/ui/__init__.py (1)
  15. korman/ui/ui_image.py (25)
  16. korman/ui/ui_world.py (1)

2
korlib/CMakeLists.txt

@@ -18,7 +18,6 @@ endif()
 # Da files
 set(korlib_HEADERS
-buffer.h
 bumpmap.h
 korlib.h
 sound.h
@@ -26,7 +25,6 @@ set(korlib_HEADERS
 )
 set(korlib_SOURCES
-buffer.cpp
 bumpmap.cpp
 module.cpp
 sound.cpp

111
korlib/buffer.cpp

@@ -1,111 +0,0 @@
/* This file is part of Korman.
*
* Korman is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Korman is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Korman. If not, see <http://www.gnu.org/licenses/>.
*/
#include "buffer.h"
extern "C" {
static void pyBuffer_dealloc(pyBuffer* self) {
delete[] self->m_buffer;
Py_TYPE(self)->tp_free((PyObject*)self);
}
static PyObject* pyBuffer_new(PyTypeObject* type, PyObject* args, PyObject* kwds) {
PyErr_SetString(PyExc_RuntimeError, "Buffers cannot be created by mere mortals");
return NULL;
}
PyTypeObject pyBuffer_Type = {
PyVarObject_HEAD_INIT(NULL, 0)
"_korlib.Buffer", /* tp_name */
sizeof(pyBuffer), /* tp_basicsize */
0, /* tp_itemsize */
(destructor)pyBuffer_dealloc, /* tp_dealloc */
NULL, /* tp_print */
NULL, /* tp_getattr */
NULL, /* tp_setattr */
NULL, /* tp_compare */
NULL, /* tp_repr */
NULL, /* tp_as_number */
NULL, /* tp_as_sequence */
NULL, /* tp_as_mapping */
NULL, /* tp_hash */
NULL, /* tp_call */
NULL, /* tp_str */
NULL, /* tp_getattro */
NULL, /* tp_setattro */
NULL, /* tp_as_buffer */
Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */
"Buffer", /* tp_doc */
NULL, /* tp_traverse */
NULL, /* tp_clear */
NULL, /* tp_richcompare */
0, /* tp_weaklistoffset */
NULL, /* tp_iter */
NULL, /* tp_iternext */
NULL, /* tp_methods */
NULL, /* tp_members */
NULL, /* tp_getset */
NULL, /* tp_base */
NULL, /* tp_dict */
NULL, /* tp_descr_get */
NULL, /* tp_descr_set */
0, /* tp_dictoffset */
NULL, /* tp_init */
NULL, /* tp_alloc */
pyBuffer_new, /* tp_new */
NULL, /* tp_free */
NULL, /* tp_is_gc */
NULL, /* tp_bases */
NULL, /* tp_mro */
NULL, /* tp_cache */
NULL, /* tp_subclasses */
NULL, /* tp_weaklist */
NULL, /* tp_del */
0, /* tp_version_tag */
NULL, /* tp_finalize */
};
PyObject* Init_pyBuffer_Type() {
if (PyType_Ready(&pyBuffer_Type) < 0)
return NULL;
Py_INCREF(&pyBuffer_Type);
return (PyObject*)&pyBuffer_Type;
}
int pyBuffer_Check(PyObject* obj) {
if (obj->ob_type == &pyBuffer_Type
|| PyType_IsSubtype(obj->ob_type, &pyBuffer_Type))
return 1;
return 0;
}
PyObject* pyBuffer_Steal(uint8_t* buffer, size_t size) {
pyBuffer* obj = PyObject_New(pyBuffer, &pyBuffer_Type);
obj->m_buffer = buffer;
obj->m_size = size;
return (PyObject*)obj;
}
};

37
korlib/buffer.h

@@ -1,37 +0,0 @@
/* This file is part of Korman.
*
* Korman is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Korman is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Korman. If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef _KORLIB_BUFFER_H
#define _KORLIB_BUFFER_H
#include "korlib.h"
extern "C" {
typedef struct {
PyObject_HEAD
uint8_t* m_buffer;
size_t m_size;
} pyBuffer;
extern PyTypeObject pyBuffer_Type;
PyObject* Init_pyBuffer_Type();
int pyBuffer_Check(PyObject*);
PyObject* pyBuffer_Steal(uint8_t*, size_t);
}
#endif // _KORLIB_BUFFER_H

2
korlib/module.cpp

@@ -14,7 +14,6 @@
 * along with Korman. If not, see <http://www.gnu.org/licenses/>.
 */
-#include "buffer.h"
 #include "bumpmap.h"
 #include "sound.h"
 #include "texture.h"
@@ -44,7 +43,6 @@ PyMODINIT_FUNC PyInit__korlib() {
 PyObject* module = PyModule_Create(&korlib_Module);
 // Module classes...
-PyModule_AddObject(module, "Buffer", Init_pyBuffer_Type());
 PyModule_AddObject(module, "GLTexture", Init_pyGLTexture_Type());
 return module;

38
korlib/texture.cpp

@@ -15,7 +15,6 @@
 */
 #include "texture.h"
-#include "buffer.h"
 #ifdef _WIN32
 # define WIN32_LEAN_AND_MEAN
@@ -302,7 +301,7 @@ static PyObject* pyGLTexture_get_level_data(pyGLTexture* self, PyObject* args, P
 _LevelData data = _get_level_data(self, level, bgra, report);
 if (fast)
-return pyBuffer_Steal(data.m_data, data.m_dataSize);
+return PyBytes_FromStringAndSize((char*)data.m_data, data.m_dataSize);
 // OpenGL returns a flipped image, so we must reflip it.
 size_t row_stride = data.m_width * 4;
@@ -331,39 +330,7 @@ static PyObject* pyGLTexture_get_level_data(pyGLTexture* self, PyObject* args, P
 data.m_data[i + 3] = (data.m_data[i + 0] + data.m_data[i + 1] + data.m_data[i + 2]) / 3;
 }
-return pyBuffer_Steal(data.m_data, data.m_dataSize);
+return PyBytes_FromStringAndSize((char*)data.m_data, data.m_dataSize);
-}
-static PyObject* pyGLTexture_store_in_mipmap(pyGLTexture* self, PyObject* args) {
-pyMipmap* pymipmap;
-PyObject* levels;
-size_t compression;
-if (!PyArg_ParseTuple(args, "OOn", &pymipmap, &levels, &compression) || !PySequence_Check(levels)) {
-PyErr_SetString(PyExc_TypeError, "store_in_mipmap expects a plMipmap, sequence of Buffer and int");
-return NULL;
-}
-// Since we actually have no way of knowing if that really is a pyMipmap...
-plMipmap* mipmap = plMipmap::Convert(pymipmap->fThis, false);
-if (!mipmap) {
-PyErr_SetString(PyExc_TypeError, "store_in_mipmap expects a plMipmap, sequence of Buffer and int");
-return NULL;
-}
-for (Py_ssize_t i = 0; i < PySequence_Size(levels); ++i) {
-pyBuffer* item = (pyBuffer*)PySequence_GetItem(levels, i);
-if (!pyBuffer_Check((PyObject*)item)) {
-PyErr_SetString(PyExc_TypeError, "store_in_mipmap expects a plMipmap, sequence of Buffer and int");
-return NULL;
-}
-if (compression == plBitmap::kDirectXCompression)
-mipmap->CompressImage(i, item->m_buffer, item->m_size);
-else
-mipmap->setLevelData(i, item->m_buffer, item->m_size);
-}
-Py_RETURN_NONE;
 }
 static PyMethodDef pyGLTexture_Methods[] = {
@@ -372,7 +339,6 @@ static PyMethodDef pyGLTexture_Methods[] = {
 { _pycs("generate_mipmap"), (PyCFunction)pyGLTexture_generate_mipmap, METH_NOARGS, NULL },
 { _pycs("get_level_data"), (PyCFunction)pyGLTexture_get_level_data, METH_KEYWORDS | METH_VARARGS, NULL },
-{ _pycs("store_in_mipmap"), (PyCFunction)pyGLTexture_store_in_mipmap, METH_VARARGS, NULL },
 { NULL, NULL, 0, NULL }
 };
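With the korlib Buffer type gone, get_level_data now returns a plain Python bytes object on both the fast and the re-flipped paths. The sketch below is not part of the diff; it reuses names from the material.py hunk later in this changeset to illustrate what the new return type means for callers:

```python
# Illustrative only; `glimage`, `key`, `fmt`, `mipmap` and `self._report` are as
# used in MaterialConverter.export_prepared_image (korman/exporter/material.py below).
level_data = glimage.get_level_data(0, key.calc_alpha, fmt, report=self._report)
assert isinstance(level_data, bytes)   # previously a _korlib.Buffer instance

# Plain bytes can be handed straight to libHSPlasma:
mipmap.setLevel(0, level_data)         # uncompressed / JPEG / PNG path
# ...or pre-compressed through plMipmap.CompressImage for the DXT path.
```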

26
korman/exporter/convert.py

@@ -23,6 +23,7 @@ from . import animation
 from . import camera
 from . import explosions
 from . import etlight
+from . import image
 from . import logger
 from . import manager
 from . import mesh
@@ -54,6 +55,7 @@ class Exporter:
 self.animation = animation.AnimationConverter(self)
 self.sumfile = sumfile.SumFile()
 self.camera = camera.CameraConverter(self)
+self.image = image.ImageCache(self)
 # Step 0.8: Init the progress mgr
 self.mesh.add_progress_presteps(self.report)
@@ -66,6 +68,7 @@ class Exporter:
 self.report.progress_add_step("Finalizing Plasma Logic")
 self.report.progress_add_step("Exporting Textures")
 self.report.progress_add_step("Composing Geometry")
+self.report.progress_add_step("Saving Age Files")
 self.report.progress_start("EXPORTING AGE")
 # Step 0.9: Apply modifiers to all meshes temporarily.
@@ -101,7 +104,7 @@ class Exporter:
 self.mesh.finalize()
 # Step 5: FINALLY. Let's write the PRPs and crap.
-self.mgr.save_age(Path(self._op.filepath))
+self._save_age()
 # Step 5.1: Save out the export report.
 # If the export fails and this doesn't save, we have bigger problems than
@@ -340,3 +343,24 @@ class Exporter:
 if proc is not None:
 proc(self, bl_obj, sceneobject)
 inc_progress()
+def _save_age(self):
+self.report.progress_advance()
+self.mgr.save_age(Path(self._op.filepath))
+self.image.save()
+@property
+def texcache_path(self):
+age = bpy.context.scene.world.plasma_age
+filepath = age.texcache_path
+if not filepath or not Path(filepath).is_file():
+filepath = bpy.context.blend_data.filepath
+if not filepath:
+filepath = self.filepath
+filepath = str(Path(filepath).with_suffix(".ktc"))
+age.texcache_path = filepath
+return filepath
+@property
+def texcache_method(self):
+return bpy.context.scene.world.plasma_age.texcache_method
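The texcache_path property above falls back from a previously recorded cache file, to the .blend location, to the export target, always ending in a .ktc suffix. A standalone sketch of that resolution order (hypothetical paths, and assuming the fallback nesting follows the usual Korman pattern):

```python
from pathlib import Path

def resolve_texcache_path(age_path, blend_path, export_path):
    """Mirror of Exporter.texcache_path: reuse a valid saved path, else derive one."""
    if age_path and Path(age_path).is_file():
        return age_path
    base = blend_path if blend_path else export_path
    return str(Path(base).with_suffix(".ktc"))

# An age whose .blend lives at /home/artist/MyAge.blend caches to /home/artist/MyAge.ktc
print(resolve_texcache_path("", "/home/artist/MyAge.blend", "/exports/MyAge.age"))
```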

341
korman/exporter/image.py

@@ -0,0 +1,341 @@
# This file is part of Korman.
#
# Korman is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Korman is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Korman. If not, see <http://www.gnu.org/licenses/>.
import enum
from pathlib import Path
from PyHSPlasma import *
import time
import weakref
_HEADER_MAGICK = b"KTH\x00"
_INDEX_MAGICK = b"KTI\x00"
_DATA_MAGICK = b"KTC\x00"
_ENTRY_MAGICK = b"KTE\x00"
_IMAGE_MAGICK = b"KTT\x00"
_MIP_MAGICK = b"KTM\x00"
@enum.unique
class _HeaderBits(enum.IntEnum):
last_export = 0
index_pos = 1
@enum.unique
class _IndexBits(enum.IntEnum):
image_count = 0
@enum.unique
class _EntryBits(enum.IntEnum):
image_name = 0
mip_levels = 1
image_pos = 2
compression = 3
source_size = 4
export_size = 5
last_export = 6
class _CachedImage:
def __init__(self):
self.name = None
self.mip_levels = 1
self.data_pos = None
self.image_data = None
self.source_size = None
self.export_size = None
self.compression = None
self.export_time = None
self.modify_time = None
def __str__(self):
return self.name
class ImageCache:
def __init__(self, exporter):
self._exporter = weakref.ref(exporter)
self._images = {}
self._read_stream = hsFileStream()
self._stream_handles = 0
def add_texture(self, texture, num_levels, export_size, compression, data):
image = texture.image
image_name = str(texture)
key = (image_name, compression)
ex_method, im_method = self._exporter().texcache_method, image.plasma_image.texcache_method
method = set((ex_method, im_method))
if texture.ephemeral or "skip" in method:
self._images.pop(key, None)
return
elif im_method == "rebuild":
image.plasma_image.texcache_method = "use"
image = _CachedImage()
image.name = image_name
image.mip_levels = num_levels
image.compression = compression
image.source_size = texture.image.size
image.export_size = export_size
image.image_data = data
self._images[key] = image
def _compact(self):
for key, image in self._images.copy().items():
if image.image_data is None:
self._images.pop(key)
def __enter__(self):
if self._stream_handles == 0:
path = self._exporter().texcache_path
if Path(path).is_file():
self._read_stream.open(path, fmRead)
self._stream_handles += 1
return self
def __exit__(self, type, value, tb):
self._stream_handles -= 1
if self._stream_handles == 0:
self._read_stream.close()
def get_from_texture(self, texture, compression):
bl_image = texture.image
# If the texture is ephemeral (eg a lightmap) or has been marked "rebuild" or "skip"
# in the UI, we don't want anything from the cache. In the first two cases, we never
# want to cache that crap. In the latter case, we just want to signal a recache is needed.
ex_method, im_method = self._exporter().texcache_method, texture.image.plasma_image.texcache_method
method = set((ex_method, im_method))
if method != {"use"} or texture.ephemeral:
return None
key = (str(texture), compression)
cached_image = self._images.get(key)
if cached_image is None:
return None
# ensure the texture key generally matches up with our copy of this image.
# if not, a recache will likely be triggered implicitly.
if tuple(bl_image.size) != cached_image.source_size:
return None
# if the image is on the disk, we can check the its modify time for changes
if cached_image.modify_time is None:
# if the image is packed, the filepath will be some garbage beginning with
# the string "//". There isn't much we can do with that, unless the user
# happens to have an unpacked copy lying around somewheres...
path = Path(bl_image.filepath_from_user())
if path.is_file():
cached_image.modify_time = path.stat().st_mtime
if cached_image.export_time and cached_image.export_time < cached_image.modify_time:
return None
else:
cached_image.modify_time = 0
# ensure the data has been loaded from the cache
if cached_image.image_data is None:
try:
cached_image.image_data = tuple(self._read_image_data(cached_image, self._read_stream))
except AssertionError:
self._report.warn("Cached copy of '{}' is corrupt and will be discarded", cached_image.name, indent=2)
self._images.pop(key)
return None
return cached_image
def load(self):
if self._exporter().texcache_method == "skip":
return
try:
with self:
self._read(self._read_stream)
except AssertionError:
self._report.warn("Texture Cache is corrupt and will be regenerated")
self._images.clear()
def _read(self, stream):
if stream.size == 0:
return
stream.seek(0)
assert stream.read(4) == _HEADER_MAGICK
# if we use a bit vector to define our header strcture, we can add
# new fields without having to up the file version, trashing old
# texture cache files... :)
flags = hsBitVector()
flags.read(stream)
# ALWAYS ADD NEW FIELDS TO THE END OF THIS SECTION!!!!!!!
if flags[_HeaderBits.last_export]:
self.last_export = stream.readDouble()
if flags[_HeaderBits.index_pos]:
index_pos = stream.readInt()
self._read_index(index_pos, stream)
def _read_image_data(self, image, stream):
if image.data_pos is None:
return None
assert stream.size > 0
stream.seek(image.data_pos)
assert stream.read(4) == _IMAGE_MAGICK
# unused currently
image_flags = hsBitVector()
image_flags.read(stream)
# given this is a generator, someone else might change our stream position
# between iterations, so we'd best bookkeep the position
pos = stream.pos
for i in range(image.mip_levels):
if stream.pos != pos:
stream.seek(pos)
assert stream.read(4) == _MIP_MAGICK
# this should only ever be image data...
# store your flags somewhere else!
size = stream.readInt()
data = stream.read(size)
pos = stream.pos
yield data
def _read_index(self, index_pos, stream):
stream.seek(index_pos)
assert stream.read(4) == _INDEX_MAGICK
# See above, can change the index format easily...
flags = hsBitVector()
flags.read(stream)
# ALWAYS ADD NEW FIELDS TO THE END OF THIS SECTION!!!!!!!
image_count = stream.readInt() if flags[_IndexBits.image_count] else 0
# Here begins the image map
assert stream.read(4) == _DATA_MAGICK
for i in range(image_count):
self._read_index_entry(stream)
def _read_index_entry(self, stream):
assert stream.read(4) == _ENTRY_MAGICK
image = _CachedImage()
# See above, can change the entry format easily...
flags = hsBitVector()
flags.read(stream)
# ALWAYS ADD NEW FIELDS TO THE END OF THIS SECTION!!!!!!!
if flags[_EntryBits.image_name]:
image.name = stream.readSafeWStr()
if flags[_EntryBits.mip_levels]:
image.mip_levels = stream.readByte()
if flags[_EntryBits.image_pos]:
image.data_pos = stream.readInt()
if flags[_EntryBits.compression]:
image.compression = stream.readByte()
if flags[_EntryBits.source_size]:
image.source_size = (stream.readInt(), stream.readInt())
if flags[_EntryBits.export_size]:
image.export_size = (stream.readInt(), stream.readInt())
if flags[_EntryBits.last_export]:
image.export_time = stream.readDouble()
# do we need to check for duplicate images?
self._images[(image.name, image.compression)] = image
@property
def _report(self):
return self._exporter().report
def save(self):
if self._exporter().texcache_method == "skip":
return
# TODO: add a way to preserve unused images for a brief period so we don't toss
# already cached images that are only removed from the age temporarily...
self._compact()
# Assume all read operations are done (don't be within' my cache while you savin')
assert self._stream_handles == 0
with hsFileStream().open(self._exporter().texcache_path, fmWrite) as stream:
self._write(stream)
def _write(self, stream):
flags = hsBitVector()
flags[_HeaderBits.index_pos] = True
stream.seek(0)
stream.write(_HEADER_MAGICK)
flags.write(stream)
header_index_pos = stream.pos
stream.writeInt(-1)
for image in self._images.values():
self._write_image_data(image, stream)
# fix the index position
index_pos = stream.pos
self._write_index(stream)
stream.seek(header_index_pos)
stream.writeInt(index_pos)
def _write_image_data(self, image, stream):
# unused currently
flags = hsBitVector()
image.data_pos = stream.pos
stream.write(_IMAGE_MAGICK)
flags.write(stream)
for i in image.image_data:
stream.write(_MIP_MAGICK)
stream.writeInt(len(i))
stream.write(i)
def _write_index(self, stream):
flags = hsBitVector()
flags[_IndexBits.image_count] = True
pos = stream.pos
stream.write(_INDEX_MAGICK)
flags.write(stream)
stream.writeInt(len(self._images))
stream.write(_DATA_MAGICK)
for image in self._images.values():
self._write_index_entry(image, stream)
return pos
def _write_index_entry(self, image, stream):
flags = hsBitVector()
flags[_EntryBits.image_name] = True
flags[_EntryBits.mip_levels] = True
flags[_EntryBits.image_pos] = True
flags[_EntryBits.compression] = True
flags[_EntryBits.source_size] = True
flags[_EntryBits.export_size] = True
flags[_EntryBits.last_export] = True
stream.write(_ENTRY_MAGICK)
flags.write(stream)
stream.writeSafeWStr(str(image))
stream.writeByte(image.mip_levels)
stream.writeInt(image.data_pos)
stream.writeByte(image.compression)
stream.writeInt(image.source_size[0])
stream.writeInt(image.source_size[1])
stream.writeInt(image.export_size[0])
stream.writeInt(image.export_size[1])
stream.writeDouble(time.time())
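Putting the writer above together, a .ktc file is laid out as a KTH header (bit vector, optional last-export time, index offset), a run of KTT image blocks each containing KTM-tagged mip levels, and a KTI/KTC index of KTE entries written last so its offset can be patched back into the header. A minimal sanity-check reader built only from the PyHSPlasma calls seen above (a sketch; it assumes PyHSPlasma is importable outside Blender):

```python
from PyHSPlasma import hsFileStream, hsBitVector, fmRead

def dump_ktc_index(path):
    """Print how many images a Korman texture cache claims to hold."""
    stream = hsFileStream()
    stream.open(path, fmRead)
    try:
        assert stream.read(4) == b"KTH\x00"        # _HEADER_MAGICK
        flags = hsBitVector()
        flags.read(stream)
        if flags[0]:                               # _HeaderBits.last_export
            stream.readDouble()
        if flags[1]:                               # _HeaderBits.index_pos
            stream.seek(stream.readInt())
            assert stream.read(4) == b"KTI\x00"    # _INDEX_MAGICK
            index_flags = hsBitVector()
            index_flags.read(stream)
            count = stream.readInt() if index_flags[0] else 0
            assert stream.read(4) == b"KTC\x00"    # _DATA_MAGICK
            print("{} cached image(s) in {}".format(count, path))
    finally:
        stream.close()
```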

194
korman/exporter/material.py

@@ -82,6 +82,7 @@ class _Texture:
 else:
 self.auto_ext = "hsm"
 self.extension = kwargs.get("extension", self.auto_ext)
+self.ephemeral = kwargs.get("ephemeral", False)
 self.image = image
 def __eq__(self, other):
@@ -653,6 +654,7 @@ class MaterialConverter:
 to use the image datablock extension, set this to None
 - indent: (optional) indentation level for log messages
 default: 2
+- ephemeral: (optional) never cache this image
 """
 owner = kwargs.pop("owner", None)
 indent = kwargs.pop("indent", 2)
@@ -672,91 +674,117 @@
         inc_progress = self._report.progress_increment
         mgr = self._mgr
 
-        for key, owners in self._pending.items():
-            name = str(key)
-            self._report.msg("\n[Mipmap '{}']", name)
-
-            image = key.image
-            oWidth, oHeight = image.size
-            if oWidth == 0 and oHeight == 0:
-                raise ExportError("Image '{}' could not be loaded.".format(image.name))
-
-            eWidth = helpers.ensure_power_of_two(oWidth)
-            eHeight = helpers.ensure_power_of_two(oHeight)
-            if (eWidth != oWidth) or (eHeight != oHeight):
-                self._report.msg("Image is not a POT ({}x{}) resizing to {}x{}",
-                                 oWidth, oHeight, eWidth, eHeight, indent=1)
-                self._resize_image(image, eWidth, eHeight)
-
-            # Now we try to use the pile of hints we were given to figure out what format to use
-            allowed_formats = key.allowed_formats
-            if key.mipmap:
-                compression = plBitmap.kDirectXCompression
-            elif "PNG" in allowed_formats and self._mgr.getVer() == pvMoul:
-                compression = plBitmap.kPNGCompression
-            elif "DDS" in allowed_formats:
-                compression = plBitmap.kDirectXCompression
-            elif "JPG" in allowed_formats:
-                compression = plBitmap.kJPEGCompression
-            elif "BMP" in allowed_formats:
-                compression = plBitmap.kUncompressed
-            else:
-                raise RuntimeError(allowed_formats)
-            dxt = plBitmap.kDXT5 if key.use_alpha or key.calc_alpha else plBitmap.kDXT1
-
-            # Grab the image data from OpenGL and stuff it into the plBitmap
-            helper = GLTexture(key)
-            with helper as glimage:
-                if compression == plBitmap.kDirectXCompression:
-                    numLevels = glimage.num_levels
-                    self._report.msg("Generating mip levels", indent=1)
-                    glimage.generate_mipmap()
-                else:
-                    numLevels = 1
-                    self._report.msg("Stuffing image data", indent=1)
-
-                # Non-DXT images are BGRA in Plasma
-                fmt = compression != plBitmap.kDirectXCompression
-
-                # Hold the uncompressed level data for now. We may have to make multiple copies of
-                # this mipmap for per-page textures :(
-                data = []
-                for i in range(numLevels):
-                    data.append(glimage.get_level_data(i, key.calc_alpha, fmt, report=self._report))
-
-            # Be a good citizen and reset the Blender Image to pre-futzing state
-            image.reload()
-
-            # Now we poke our new bitmap into the pending layers. Note that we have to do some funny
-            # business to account for per-page textures
-            pages = {}
-
-            self._report.msg("Adding to...", indent=1)
-            for owner_key in owners:
-                owner = owner_key.object
-                self._report.msg("[{} '{}']", owner.ClassName()[2:], owner_key.name, indent=2)
-                page = mgr.get_textures_page(owner_key) # Layer's page or Textures.prp
-
-                # If we haven't created this plMipmap in the page (either layer's page or Textures.prp),
-                # then we need to do that and stuff the level data. This is a little tedious, but we
-                # need to be careful to manage our resources correctly
-                if page not in pages:
-                    mipmap = plMipmap(name=name, width=eWidth, height=eHeight, numLevels=numLevels,
-                                      compType=compression, format=plBitmap.kRGB8888, dxtLevel=dxt)
-                    helper.store_in_mipmap(mipmap, data, compression)
-                    mgr.AddObject(page, mipmap)
-                    pages[page] = mipmap
-                else:
-                    mipmap = pages[page]
-
-                if isinstance(owner, plLayerInterface):
-                    owner.texture = mipmap.key
-                elif isinstance(owner, plImageLibMod):
-                    owner.addImage(mipmap.key)
-                else:
-                    raise RuntimeError(owner.ClassName())
-
-            inc_progress()
+        # This with statement causes the texture cache to hold open a
+        # read stream for the cache file, preventing spurious open-close
+        # spin washing during this tight loop. Note that the cache still
+        # has to actually be loaded ^_^
+        with self._texcache as texcache:
+            texcache.load()
+
+            for key, owners in self._pending.items():
+                name = str(key)
+                self._report.msg("\n[Mipmap '{}']", name)
+
+                image = key.image
+                oWidth, oHeight = image.size
+                if oWidth == 0 and oHeight == 0:
+                    raise ExportError("Image '{}' could not be loaded.".format(image.name))
+
+                # Now we try to use the pile of hints we were given to figure out what format to use
+                allowed_formats = key.allowed_formats
+                if key.mipmap:
+                    compression = plBitmap.kDirectXCompression
+                elif "PNG" in allowed_formats and self._mgr.getVer() == pvMoul:
+                    compression = plBitmap.kPNGCompression
+                elif "DDS" in allowed_formats:
+                    compression = plBitmap.kDirectXCompression
+                elif "JPG" in allowed_formats:
+                    compression = plBitmap.kJPEGCompression
+                elif "BMP" in allowed_formats:
+                    compression = plBitmap.kUncompressed
+                else:
+                    raise RuntimeError(allowed_formats)
+                dxt = plBitmap.kDXT5 if key.use_alpha or key.calc_alpha else plBitmap.kDXT1
+
+                # Mayhaps we have a cached version of this that has already been exported
+                cached_image = texcache.get_from_texture(key, compression)
+
+                if cached_image is None:
+                    eWidth = helpers.ensure_power_of_two(oWidth)
+                    eHeight = helpers.ensure_power_of_two(oHeight)
+                    if (eWidth != oWidth) or (eHeight != oHeight):
+                        self._report.msg("Image is not a POT ({}x{}) resizing to {}x{}",
+                                         oWidth, oHeight, eWidth, eHeight, indent=1)
+                        self._resize_image(image, eWidth, eHeight)
+
+                    # Grab the image data from OpenGL and stuff it into the plBitmap
+                    helper = GLTexture(key)
+                    with helper as glimage:
+                        if compression == plBitmap.kDirectXCompression:
+                            numLevels = glimage.num_levels
+                            self._report.msg("Generating mip levels", indent=1)
+                            glimage.generate_mipmap()
+                        else:
+                            numLevels = 1
+                            self._report.msg("Compressing single level", indent=1)
+
+                        # Non-DXT images are BGRA in Plasma
+                        fmt = compression != plBitmap.kDirectXCompression
+
+                        # Hold the uncompressed level data for now. We may have to make multiple copies of
+                        # this mipmap for per-page textures :(
+                        data = []
+                        for i in range(numLevels):
+                            data.append(glimage.get_level_data(i, key.calc_alpha, fmt, report=self._report))
+
+                    # Be a good citizen and reset the Blender Image to pre-futzing state
+                    image.reload()
+
+                    # If this is a DXT-compressed mipmap, we need to use a temporary mipmap
+                    # to do the compression. We'll then steal the data from it.
+                    if compression == plBitmap.kDirectXCompression:
+                        mipmap = plMipmap(name=name, width=eWidth, height=eHeight, numLevels=numLevels,
+                                          compType=compression, format=plBitmap.kRGB8888, dxtLevel=dxt)
+                        for i in range(numLevels):
+                            mipmap.CompressImage(i, data[i])
+                            data[i] = mipmap.getLevel(i)
+                    texcache.add_texture(key, numLevels, (eWidth, eHeight), compression, data)
+                else:
+                    eWidth, eHeight = cached_image.export_size
+                    data = cached_image.image_data
+                    numLevels = cached_image.mip_levels
+
+                # Now we poke our new bitmap into the pending layers. Note that we have to do some funny
+                # business to account for per-page textures
+                pages = {}
+
+                self._report.msg("Adding to...", indent=1)
+                for owner_key in owners:
+                    owner = owner_key.object
+                    self._report.msg("[{} '{}']", owner.ClassName()[2:], owner_key.name, indent=2)
+                    page = mgr.get_textures_page(owner_key) # Layer's page or Textures.prp
+
+                    # If we haven't created this plMipmap in the page (either layer's page or Textures.prp),
+                    # then we need to do that and stuff the level data. This is a little tedious, but we
+                    # need to be careful to manage our resources correctly
+                    if page not in pages:
+                        mipmap = plMipmap(name=name, width=eWidth, height=eHeight, numLevels=numLevels,
+                                          compType=compression, format=plBitmap.kRGB8888, dxtLevel=dxt)
+                        for i, buf in enumerate(data):
+                            mipmap.setLevel(i, buf)
+                        mgr.AddObject(page, mipmap)
+                        pages[page] = mipmap
+                    else:
+                        mipmap = pages[page]
+
+                    if isinstance(owner, plLayerInterface):
+                        owner.texture = mipmap.key
+                    elif isinstance(owner, plImageLibMod):
+                        owner.addImage(mipmap.key)
+                    else:
+                        raise RuntimeError(owner.ClassName())
+
+                inc_progress()
 
     def get_materials(self, bo):
         return self._obj2mat.get(bo, [])
@@ -843,3 +871,7 @@
 self._alphatest[image] = result
 return result
+@property
+def _texcache(self):
+return self._exporter().image

5
korman/korlib/texture.py

@@ -181,8 +181,3 @@
 # texture in a single pixel?"
 # :)
 return max(numLevels - 2, 2)
-def store_in_mipmap(self, mipmap, data, compression):
-func = mipmap.CompressImage if compression == plBitmap.kDirectXCompression else mipmap.setLevel
-for i, level in enumerate(data):
-func(i, level)
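What replaces this helper is plain libHSPlasma calls in MaterialConverter (see the material.py hunk above): DXT data is compressed once through a scratch plMipmap and the resulting bytes are reused for the cache and for every per-page copy. Distilled here for illustration; names follow material.py, and `scratch` is this sketch's own name for the temporary mipmap:

```python
# Illustration only -- condensed from MaterialConverter.export_prepared_image.
if compression == plBitmap.kDirectXCompression:
    scratch = plMipmap(name=name, width=eWidth, height=eHeight, numLevels=numLevels,
                       compType=compression, format=plBitmap.kRGB8888, dxtLevel=dxt)
    for i in range(numLevels):
        scratch.CompressImage(i, data[i])   # compress the raw GL level data...
        data[i] = scratch.getLevel(i)       # ...and keep the compressed bytes

# Later, each per-page plMipmap simply receives the (possibly compressed) levels:
for i, buf in enumerate(data):
    mipmap.setLevel(i, buf)
```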

11
korman/operators/op_export.py

@@ -48,6 +48,16 @@ class ExportOperator(bpy.types.Operator):
 "show_console": (BoolProperty, {"name": "Display Log Console",
 "description": "Forces the Blender System Console open during the export",
 "default": True}),
+"texcache_path": (StringProperty, {"name": "Texture Cache Path",
+"description": "Texture Cache Filepath"}),
+"texcache_method": (EnumProperty, {"name": "Texture Cache",
+"description": "Texture Cache Settings",
+"items": [("skip", "Don't Use Texture Cache", "The texture cache is neither used nor updated."),
+("use", "Use Texture Cache", "Use (and update, if needed) cached textures."),
+("rebuild", "Rebuild Texture Cache", "Rebuilds the texture cache from scratch.")],
+"default": "use"}),
 }
 # This wigs out and very bad things happen if it's not directly on the operator...
@@ -66,6 +76,7 @@ class ExportOperator(bpy.types.Operator):
 # The crazy mess we're doing with props on the fly means we have to explicitly draw them :(
 layout.prop(self, "version")
+layout.prop(age, "texcache_method", text="")
 layout.prop(age, "bake_lighting")
 row = layout.row()
 row.enabled = ConsoleToggler.is_platform_supported()

2
korman/properties/__init__.py

@@ -16,6 +16,7 @@
 import bpy
 from .prop_camera import *
+from .prop_image import *
 from .prop_lamp import *
 from . import modifiers
 from .prop_object import *
@@ -25,6 +26,7 @@ from .prop_world import *
 def register():
 bpy.types.Camera.plasma_camera = bpy.props.PointerProperty(type=PlasmaCamera)
+bpy.types.Image.plasma_image = bpy.props.PointerProperty(type=PlasmaImage)
 bpy.types.Lamp.plasma_lamp = bpy.props.PointerProperty(type=PlasmaLamp)
 bpy.types.Object.plasma_net = bpy.props.PointerProperty(type=PlasmaNet)
 bpy.types.Object.plasma_object = bpy.props.PointerProperty(type=PlasmaObject)

1
korman/properties/modifiers/render.py

@@ -215,6 +215,7 @@ class PlasmaLightMapGen(idprops.IDPropMixin, PlasmaModifierProperties):
 # Mmm... cheating
 mat_mgr.export_prepared_image(owner=layer, image=lightmap_im,
 allowed_formats={"PNG", "DDS"},
+ephemeral=True,
 indent=2)
 @classmethod

26
korman/properties/prop_image.py

@@ -0,0 +1,26 @@
# This file is part of Korman.
#
# Korman is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Korman is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Korman. If not, see <http://www.gnu.org/licenses/>.
import bpy
from bpy.props import *
class PlasmaImage(bpy.types.PropertyGroup):
texcache_method = EnumProperty(name="Texture Cache",
description="Texture Cache Settings",
items=[("skip", "Don't Cache Image", "This image is never cached."),
("use", "Use Image Cache", "This image should be cached."),
("rebuild", "Refresh Image Cache", "Forces this image to be recached on the next export.")],
default="use",
options=set())
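Since properties/__init__.py above attaches this group to bpy.types.Image, the per-image cache setting is scriptable as well as visible in the Image Editor header. For example (the image name is hypothetical):

```python
import bpy

# Force a single image to be re-cached on the next export, leaving the rest cached.
image = bpy.data.images["stone_wall.png"]        # hypothetical datablock name
image.plasma_image.texcache_method = "rebuild"   # ImageCache flips this back to "use"
```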

1
korman/ui/__init__.py

@@ -14,6 +14,7 @@
 # along with Korman. If not, see <http://www.gnu.org/licenses/>.
 from .ui_camera import *
+from .ui_image import *
 from .ui_lamp import *
 from .ui_list import *
 from .ui_menus import *

25
korman/ui/ui_image.py

@@ -0,0 +1,25 @@
# This file is part of Korman.
#
# Korman is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Korman is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Korman. If not, see <http://www.gnu.org/licenses/>.
import bpy
class PlasmaImageEditorHeader(bpy.types.Header):
bl_space_type = "IMAGE_EDITOR"
def draw(self, context):
layout, image = self.layout, context.space_data.image
settings = image.plasma_image
layout.prop(settings, "texcache_method", text="")

1
korman/ui/ui_world.py

@@ -133,6 +133,7 @@ class PlasmaAgePanel(AgeButtonsPanel, bpy.types.Panel):
 col = split.column()
 col.label("Export Settings:")
+col.prop(age, "texcache_method", text="")
 col.prop(age, "bake_lighting")
 cons_ui = col.column()
 cons_ui.enabled = ConsoleToggler.is_platform_supported()
