Rework texture loading and caching system:

* Create a cache module to query and load cached files without having to load or
download the original file, as well as compare modification time and size.
* Change `loadUri` to allow fetching the modification time and size of a file.
* Change `onload` of `loadable` to take a single `FetchResult` object instead
  of many arguments (ok, err, data, time, size).
* Remove Pixie support.
* Add a buffer copy to simplify potentially problematic code.
* Add cache_key to newTexture when loading from buffer.
* For packed images, use the name of the blend file and the image as cache_key.
This commit is contained in:
Alberto Torres 2025-01-25 14:25:26 +01:00
parent d903789389
commit f898f0b7d8
8 changed files with 388 additions and 154 deletions

View file

@ -45,7 +45,10 @@
{.hint[ConvFromXtoItselfNotNeeded]:off.}
import std/tables
import std/times
import std/os
import arr_ref # for SliceMem
import sugar
type LoadableResourceStatus* = enum
NotStarted
@ -53,16 +56,20 @@ type LoadableResourceStatus* = enum
Finished
Error
# type Result = tuple[ok: bool, err: string, data: SliceMem[byte]]
type FetchResult* = object
ok*: bool
err*: string
data*: SliceMem[byte]
size*: int
modified*: Time
type LoadableResource* = ref object of RootObj
status: LoadableResourceStatus
start_func: proc(self: LoadableResource) {.closure.}
onload_func: proc(ok: bool, err: string, data: SliceMem[byte]) {.closure.}
onload_func: proc(res: FetchResult) {.closure.}
cancel_func: proc() {.closure.}
str*: proc(): string
use_threads: bool
# result: ref Result # only to be used with loadAll
proc newLoadableResource*[T: LoadableResource](
start: proc(self: LoadableResource),
@ -80,7 +87,7 @@ when compileOption("threads"):
# main -> thread channels
var to_start: Channel[LoadableResource]
# main <- thread channels
var to_return: Channel[(LoadableResource, bool, string, SliceMem[byte])]
var to_return: Channel[(LoadableResource, FetchResult)]
proc start*[T: LoadableResource](self: T) =
self.status = Started
@ -89,20 +96,18 @@ proc start*[T: LoadableResource](self: T) =
else:
self.start_func(self)
proc `onload=`*[T: LoadableResource](self: T, onload_func: proc(ok: bool, err: string, data: SliceMem[byte])) =
proc `onload=`*[T: LoadableResource](self: T, onload_func: proc(res: FetchResult)) =
self.onload_func = onload_func
proc onload*[T: LoadableResource](self: T, ok: bool, err: string, data = SliceMem[byte]()) =
proc onload*[T: LoadableResource](self: T, res: FetchResult) =
if self.status == Started:
self.status = if ok: Finished else: Error
# if self.result != nil:
# self.result[] = (ok, err, data)
self.status = if res.ok: Finished else: Error
if self.onload_func != nil:
if self.use_threads:
when compileOption("threads"):
to_return.send((self.LoadableResource, ok, err, data))
to_return.send((self.LoadableResource, res))
else:
self.onload_func(ok, err, data)
self.onload_func(res)
proc cancel*[T: LoadableResource](self: T) =
if self.status != Started:
@ -145,8 +150,8 @@ when compileOption("threads"):
let tried = to_return.tryRecv()
if not tried.dataAvailable:
break
let (res, ok, err, data) = tried.msg
res.onload_func(ok, err, data)
let (res, data) = tried.msg
res.onload_func(data)
proc terminateLoadableWorkerThreads*() =
# TODO: test this
@ -222,6 +227,7 @@ when not defined(onlyLocalFiles):
else:
import std/httpclient
import std/httpcore
import std/memfiles
import std/strutils
@ -252,10 +258,11 @@ proc registerLogUriHandler*(handler: proc(uri: string)) =
proc loadUri*(
uri: string,
onload_func: proc(ok: bool, err: string, data: SliceMem[byte]) = nil,
onload_func: proc(res: FetchResult) = nil,
range = (-1,-1),
auto_start = true,
use_threads = true,
headers_only = false,
): Fetch {.discardable.} =
if log_uri_handler != nil:
@ -286,6 +293,7 @@ proc loadUri*(
if is_remote:
uri = uri.escapeUTF8
when defined(emscripten):
assert not headers_only, "TODO: emscripten HEAD and parsing header"
use_threads = false # API is already threaded
start_func = proc(self: LoadableResource) =
var attr: emscripten_fetch_attr_t
@ -308,18 +316,28 @@ proc loadUri*(
discard emscripten_fetch(attr, uri.cstring)
else:
var client = newHttpClient()
var response: string
var content: string
start_func = proc(self: LoadableResource) =
let self = cast[Fetch](self)
var ok = false
var time: Time
var size: int
try:
response = client.getContent(uri)
let response = client.request(uri, if headers_only: HttpHead else: HttpGet)
content = response.body
try: time = parse(response.headers["last-modified"], "ddd, dd MMM yyyy HH:mm:ss 'GMT'").toTime
except: discard
try: size = parseInt(response.headers["content-length"])
except: discard
ok = true
except:
self.onload(false, &"Error fetching {uri}: {getCurrentExceptionMsg()}")
self.onload(FetchResult(err: &"Error fetching {uri}: {getCurrentExceptionMsg()}"))
if ok:
self.onload(true, "", newSliceMem(response[0].addr.pointer, response.len, proc() =
discard response
self.onload(FetchResult(
ok: true,
data: toSliceMem(content).to(byte),
size: if headers_only: size else: content.len,
modified: time,
))
if not is_remote:
@ -327,15 +345,38 @@ proc loadUri*(
# TODO: also test if there's better perf in local
start_func = proc(self: LoadableResource) =
try:
var memfile = memfiles.open(uri, mode=fmRead)
self.onload(true, "", newSliceMem(memfile.mem, memfile.size, proc() =
try:
memfile.close()
# this should never happen but destructors require it
except OSError: discard
))
if not uri.fileExists():
self.onload(FetchResult(
ok: false,
err: "File does not exist: " & uri,
))
return
var time: Time
var size: int
try: time = uri.getLastModificationTime()
except: discard
if headers_only:
try: size = uri.getFileSize().int
except: discard
self.onload(FetchResult(
ok: true,
size: size,
modified: time,
))
else:
var memfile = memfiles.open(uri, mode=fmRead)
self.onload(FetchResult(
ok: true,
data: newSliceMem(memfile.mem, memfile.size, proc() =
try:
memfile.close()
# this should never happen but destructors require it
except OSError: discard),
size: memfile.size,
modified: time,
))
except OSError:
self.onload(false, "Could not open file: " & uri)
self.onload(FetchResult(err: "Could not open file: " & uri))
proc str(): string = uri
self = newLoadableResource[Fetch](start_func, str, use_threads=use_threads)
self.onload_func = onload_func

202
src/gpu_formats/cache.nim Normal file
View file

@ -0,0 +1,202 @@
import std/strutils
import std/hashes
import std/tables
import std/locks
import std/times
import std/json
import std/uri
import std/os
import arr_ref
import loadable
export arr_ref
import sugar
type CacheResult* = object
ok*: bool
err*: string
orig_data*, cached_data*: SliceMem[byte]
type CacheCallback* = proc(res: CacheResult)
type CacheEntry = object
cached: string
modified: Time
size: int
settings: string
# TODO: flag to exclude from cache?
var cache_lock: Lock
var cached_files {.guard: cache_lock.}: Table[string, CacheEntry]
var cache_dir {.guard: cache_lock.}: string
template CACHE_DB_PATH: string = cache_dir & "cache.db"
initLock(cache_lock)
proc uri_to_cache_file(uri: string): string =
  ## Derive a filesystem-safe cache file name from `uri`: keep only the
  ## path/query/anchor of a parseable URI, replace every non-alphanumeric
  ## character with '_', trim leading/trailing underscores, and append the
  ## hash of the full original `uri` to keep distinct URIs distinct.
  var stem = uri
  try:
    let parts = parseUri(uri)
    if parts.scheme.len > 0:
      stem = parts.path & parts.query & parts.anchor
  except: discard
  var sanitized = newStringOfCap(stem.len)
  for ch in stem:
    sanitized.add(if ch.isAlphaNumeric: ch else: '_')
  result = sanitized.strip(chars = {'_'}) & "_" & $uri.hash
# Last directory configured by this thread; avoids re-reading the cache
# database when setCacheDir is called repeatedly with the same path.
var last_dir {.threadVar.}: string

proc setCacheDir*(dir: string) =
  ## Configure the directory used for cached files and the cache database,
  ## and (re)load the database from `dir/cache.db`. Must be called before
  ## getCachedURI/getCachedFromMemory/storeCachedURI. Safe to call more than
  ## once; a repeated call with the same `dir` on the same thread is a no-op.
  assert dir != "", "A directory must be supplied."
  if last_dir == dir:
    return
  last_dir = dir
  # TODO: make dir?
  assert dir.dirExists
  withLock cache_lock:
    cache_dir = dir.normalizePathEnd(trailingSep = true) # add /
  # load database
  try:
    withLock cache_lock:
      cached_files = readFile(CACHE_DB_PATH).parseJson.to(typeof(cached_files))
  except CatchableError:
    # A missing or corrupt database is expected on first run; start empty.
    # Catch only CatchableError so programming errors (Defects) still surface.
    echo "Could not load cache database: " & getCurrentExceptionMsg()
proc storeCachedURI*(cache_key: string, cached_data: openArray[byte], dir = "") {.gcsafe.} =
# Write `cached_data` to a file named after `cache_key` inside `dir` (or the
# configured cache dir when `dir` is empty), record the file name in the
# in-memory cache table, and persist the table as the cache database.
# Requires a prior getCachedURI/getCachedFromMemory call (which creates the
# entry) and a prior setCacheDir call.
assert cache_key != "", "cache_key can't be empty"
# Shadow the parameter so a trailing path separator is guaranteed.
var dir = dir.normalizePathEnd(trailingSep = true)
withLock cache_lock:
# cast(gcsafe) is required to touch the guarded globals from a {.gcsafe.} proc.
{.cast(gcsafe).}:
assert cache_key in cached_files, "You need to call getCachedURI/Memory first."
assert cache_dir != "", "Cache directory is not configured. Call setCacheDir()."
if dir == "":
dir = cache_dir
assert cached_data.len != 0, "Can't use an empty file as cache"
# if it exists, it's overwritten so we don't need to check if there's an
# existing entry
let file_name = uri_to_cache_file(cache_key)
var f = open(dir & file_name, fmWrite)
defer: f.close()
if f.writeBuffer(cached_data[0].addr, cached_data.len) != cached_data.len:
# NOTE(review): a short write leaves a truncated file on disk while the
# entry keeps an empty `cached` field — presumably harmless because the
# entry stays un-cached, but confirm.
echo "Error: Could not write cache file"
return
withLock cache_lock:
{.cast(gcsafe).}:
# Record the cache file for this key and persist the database as JSON.
cached_files[cache_key].cached = file_name
# save database
writeFile CACHE_DB_PATH, $(%cached_files)
proc getCachedURI*[T](uri: string, cache_key: string, settings: T, callback: CacheCallback, use_cache = true) =
# Fetch `uri` through the cache. First a headers-only request retrieves the
# file's modification time and size; if they match the stored entry for
# `cache_key` (and `settings` is unchanged), the cached file is loaded and
# delivered to `callback` as `cached_data`. Otherwise the original file is
# fetched and delivered as `orig_data`, and a placeholder entry is recorded
# for storeCachedURI to complete later. `settings` may be any value
# convertible with `$` or `%` (JSON).
if not use_cache:
# Caching disabled: fetch the original file and pass it straight through.
loadUri uri, proc(res: FetchResult) =
callback CacheResult(ok: res.ok, err: res.err, orig_data: res.data)
return
var dir: string
var entry: CacheEntry
withLock cache_lock:
assert cache_dir != "", "Cache directory is not configured. Call setCacheDir()."
dir = cache_dir
entry = cached_files.getOrDefault(cache_key)
# Normalize settings to a string for comparison with the stored entry.
let settings = when compiles($settings):
$settings
elif compiles(%settings):
$(%settings)
else:
{.error: "Can't convert settings to string or JSON".}
# TODO: do we need a way to ensure we're not requesting the file multiple times?
loadUri uri, headers_only = true, onload_func = proc(res: FetchResult) =
template fallback() =
# Make cache entry without the actual file, to be added later
withLock cache_lock:
cached_files[cache_key] = CacheEntry(
modified: res.modified,
size: res.size,
settings: $settings,
)
# Fetch the original file and hand it to the callback.
loadUri uri, proc(res: FetchResult) =
callback CacheResult(ok: res.ok, err: res.err, orig_data: res.data)
if not res.ok:
# The headers request itself failed.
when not defined(myouLoadCacheOnFailure):
# pass through error
callback CacheResult(ok: false, err: res.err)
else:
# give cached file instead
loadUri dir & entry.cached, proc(res: FetchResult) =
callback CacheResult(ok: res.ok, err: res.err, cached_data: res.data)
# NOTE: time 0 means it may not be invalidated on change
if entry.cached == "" or
entry.modified < res.modified or
entry.size != res.size or
entry.settings != $settings:
# No entry yet, or the remote file / settings changed: invalidate.
fallback()
return
# Entry looks current: serve the cached file, falling back to the
# original if the cache file has gone missing on disk.
loadUri dir & entry.cached, proc(res: FetchResult) =
if res.ok:
callback CacheResult(ok: true, err: res.err, cached_data: res.data)
else:
echo "Not found: ", dir & entry.cached
echo "Missing cache file, using original file: ", uri
fallback()
proc getCachedFromMemory*[T](data: SliceMem[byte], cache_key: string, settings: T, callback: CacheCallback, use_cache = true) =
  ## Resolve in-memory data (e.g. an image packed in a blend file) through the
  ## cache: if the stored entry for `cache_key` matches `data.len` and
  ## `settings`, the cached (processed) file is loaded and handed to `callback`
  ## as `cached_data`; otherwise the original `data` is handed back as
  ## `orig_data` and a placeholder entry is recorded for storeCachedURI to
  ## complete later. `settings` may be any value convertible with `$` or `%`
  ## (JSON). (Fix: removed leftover debug output — `echo "folbac"` and three
  ## `dump` statements — that polluted stdout on every cache miss.)
  if not (use_cache and cache_key != ""):
    # Caching disabled or no usable key: pass the original data through.
    callback CacheResult(ok: true, orig_data: data)
    return
  var dir: string
  var entry: CacheEntry
  withLock cache_lock:
    assert cache_dir != "", "Cache directory is not configured. Call setCacheDir()."
    dir = cache_dir
    entry = cached_files.getOrDefault(cache_key)
  # Normalize settings to a string for comparison with the stored entry.
  let settings = when compiles($settings):
    $settings
  elif compiles(%settings):
    $(%settings)
  else:
    {.error: "Can't convert settings to string or JSON".}
  template fallback() =
    # Make cache entry without the actual file, to be added later
    # (storeCachedURI fills in the file name), then use the original data.
    withLock cache_lock:
      cached_files[cache_key] = CacheEntry(
        size: data.len,
        settings: $settings,
        # TODO: hash
      )
    callback CacheResult(ok: true, orig_data: data)
  # NOTE: only size+settings are compared; a content hash would be stronger.
  if entry.cached == "" or
      entry.size != data.len or
      entry.settings != $settings:
    # TODO: hash
    fallback()
    return
  loadUri dir & entry.cached, proc(res: FetchResult) =
    if res.ok:
      callback CacheResult(ok: true, err: res.err, cached_data: res.data)
    else:
      # Entry exists but the cache file is gone; fall back to the original
      # data and re-create the entry.
      echo "Not found: ", dir & entry.cached
      echo "Missing cache file, using original file: ", cache_key
      fallback()
# TODO: solution for partial files (mipmaps)?

View file

@ -41,11 +41,8 @@ import vmath except Quat, quat
import arr_ref
import float16
when defined(myouUsePixie):
import pixie
else:
import stb_image/read as stbi
const myouConvertHdrToFloat16 {.booldefine.} = true
import stb_image/read as stbi
const myouConvertHdrToFloat16 {.booldefine.} = true
when not defined(nimdoc):
import tinyexr
@ -113,24 +110,20 @@ proc getDimensionsFormat*(p: pointer, len: int, min_channels=0): (int, int, Text
if isEXR(p, len):
let dims = getEXRDimensions(p, len)
return (dims[0], dims[1], RGBA_f16)
when defined(myouUsePixie):
let dims = decodeImageDimensions(p, len)
return (dims.width, dims.height, RGBA_u8)
else:
# TODO, IMPORTANT: 2 channels for stb_image means grey+alpha
# We should handle those cases properly
var width, height, channels = 0
if not infoFromMemory(p.toOpenArrayByte(0, len-1), width, height, channels):
raise ValueError.newException "Could not read image"
channels = max(min_channels, channels)
let hdr = isHDRFromMemory(p.toOpenArrayByte(0, len-1))
let is16 = is16BitFromMemory(p.toOpenArrayByte(0, len-1))
# Calculate format with channels, and whether it's hdr or 16 bit
assert (RG_u8.int - R_u8.int) == 1 # (just in case someone changes the enum)
const toHDR = when myouConvertHdrToFloat16: (R_f16.int-R_u8.int) else: (R_f32.int-R_u8.int)
let format = (R_u8.int - 1 + channels +
hdr.int * toHDR + is16.int * (R_u16.int-R_u8.int)).TextureFormat
return (width, height, format)
# TODO, IMPORTANT: 2 channels for stb_image means grey+alpha
# We should handle those cases properly
var width, height, channels = 0
if not infoFromMemory(p.toOpenArrayByte(0, len-1), width, height, channels):
raise ValueError.newException "Could not read image"
channels = max(min_channels, channels)
let hdr = isHDRFromMemory(p.toOpenArrayByte(0, len-1))
let is16 = is16BitFromMemory(p.toOpenArrayByte(0, len-1))
# Calculate format with channels, and whether it's hdr or 16 bit
assert (RG_u8.int - R_u8.int) == 1 # (just in case someone changes the enum)
const toHDR = when myouConvertHdrToFloat16: (R_f16.int-R_u8.int) else: (R_f32.int-R_u8.int)
let format = (R_u8.int - 1 + channels +
hdr.int * toHDR + is16.int * (R_u16.int-R_u8.int)).TextureFormat
return (width, height, format)
proc loadFileFromSlices*(tex: Texture, slices: seq[SliceMem[byte]],
callback: proc(tex: Texture, data: SliceMem[byte]),
@ -147,21 +140,13 @@ proc loadFileFromSlices*(tex: Texture, slices: seq[SliceMem[byte]],
let min_channels = format.channel_count
let layer_stride = tex.width * tex.height * format.stride
var multilayer_buffer: ArrRef[byte]
assert tex.depth == slices.len
if tex.depth > 1:
multilayer_buffer = newArrRef[byte](layer_stride * tex.depth)
var out_buffer = newSliceMem[byte](layer_stride * tex.depth)
var pos = 0
for slice in slices:
when defined(myouUsePixie):
var image: Image
proc destructor() = discard image
else:
var image: imagePixelData[byte]
var image_16: imagePixelData[uint16]
var image_f: imagePixelData[float32]
proc destructor() =
discard image; discard image_16; discard image_f
var image: imagePixelData[byte]
var image_16: imagePixelData[uint16]
var image_f: imagePixelData[float32]
var buffer: ArrRef[byte]
# a reference to this pointer is kept with one of the vars above
var pixels_ptr: pointer
@ -174,45 +159,38 @@ proc loadFileFromSlices*(tex: Texture, slices: seq[SliceMem[byte]],
pixels_ptr = buffer[0].addr
pixels_len = buffer.len
else:
when defined(myouUsePixie):
image = decodeImage(p, len)
assert image.width == tex.width and image.height == tex.height, "Image size mismatch"
pixels_ptr = image.data[0].addr
pixels_len = image.data.len * sizeof image.data[0]
setFlipVerticallyOnLoad(flip)
flip = false
var w,h,c = 0
if isHDRFromMemory(slice.toOpenArrayByte):
image_f = loadFFromMemory(slice.toOpenArrayByte, w,h,c, min_channels)
pixels_ptr = image_f.data
pixels_len = image_f.byteLen
when myouConvertHdrToFloat16:
f32_to_f16(
cast[ptr UncheckedArray[float32]](pixels_ptr),
cast[ptr UncheckedArray[Float16]](pixels_ptr),
image_f.len)
pixels_len = pixels_len div 2
elif is16BitFromMemory(slice.toOpenArrayByte):
image_16 = load16FromMemory(slice.toOpenArrayByte, w,h,c, min_channels)
pixels_ptr = image_16.data
pixels_len = image_16.byteLen
else:
setFlipVerticallyOnLoad(flip)
flip = false
var w,h,c = 0
if isHDRFromMemory(slice.toOpenArrayByte):
image_f = loadFFromMemory(slice.toOpenArrayByte, w,h,c, min_channels)
pixels_ptr = image_f.data
pixels_len = image_f.byteLen
when myouConvertHdrToFloat16:
f32_to_f16(
cast[ptr UncheckedArray[float32]](pixels_ptr),
cast[ptr UncheckedArray[Float16]](pixels_ptr),
image_f.len)
pixels_len = pixels_len div 2
elif is16BitFromMemory(slice.toOpenArrayByte):
image_16 = load16FromMemory(slice.toOpenArrayByte, w,h,c, min_channels)
pixels_ptr = image_16.data
pixels_len = image_16.byteLen
else:
image = loadFromMemory(slice.toOpenArrayByte, w,h,c, min_channels)
pixels_ptr = image.data
pixels_len = image.len
image = loadFromMemory(slice.toOpenArrayByte, w,h,c, min_channels)
pixels_ptr = image.data
pixels_len = image.len
if layer_stride != pixels_len:
echo "Format: ", format
raise Defect.newException &"Image '{tex.name}' has a length" &
&" of {pixels_len}, expected {layer_stride}"
# TODO: make a swap_lines that copies them elsewhere
# to avoid copying two times
if flip:
swap_lines(pixels_ptr, tex.width * format.stride, tex.height)
if tex.depth == 1:
callback(tex, newSliceMem(pixels_ptr, pixels_len, destructor))
return
copyMem(multilayer_buffer[pos].addr, pixels_ptr, layer_stride)
copyMem(out_buffer[pos].addr, pixels_ptr, layer_stride)
pos += layer_stride
callback(tex, multilayer_buffer.toSliceMem)
callback(tex, out_buffer)
proc swap_lines(p: pointer, line_stride, line_count: int) {.gcsafe.} =

View file

@ -43,6 +43,7 @@
import ../types
import ./texture_decode
import ./cache
from dds_ktx import KtxInfo, KtxPart, KtxInfoParts, get_ASTC_internal_format
import arr_ref
import zstd/decompress
@ -284,6 +285,23 @@ proc loadOptimized*(tex: Texture, slices: seq[SliceMem[byte]],
callback_compressed: CallbackCompressed = nil,
flip = true, min_channels = myouMinTextureChannels) {.gcsafe.} =
# detect if we're given a cached file (by checking if it's zstandard)
# and use it directly
if slices[0].to(uint32)[0] == 0xFD2FB528'u32:
assert slices.len == 1
try:
let slices = slices[0].toOpenArray.decompress.toSliceMem.deserialize
var data = to[KtxInfoParts](slices[^1].toString)
for i,p in data.parts.mpairs:
p.data = slices[i].toPointer
tex.callback_compressed(data, slices)
except Exception as e:
echo e.getStackTrace()
echo getCurrentExceptionMsg()
# TODO: proper error handling and logging
echo "ERROR: could not load cache file for " & tex.name
return
let settings = tex.engine.cache_settings
var min_channels = min_channels
var will_compress = settings.compress_textures
@ -303,36 +321,6 @@ proc loadOptimized*(tex: Texture, slices: seq[SliceMem[byte]],
elif has_astc_support:
native_astc = true
# TODO: detect HDR support, use a fallback if unavailable
# TODO: USE A BETTER KEY, INCLUDE SETTINGS
let cache_key = tex.name
let cache_file_name = cache_key.encodeUrl & ".zst"
if settings.use_cache and callback_compressed != nil:
let cache_file = settings.cache_dir & "/" & cache_file_name
# TODO: allow networked requests
if myouAllCacheFilesExist or fileExists cache_file:
try:
when defined(myouUseAndroidAssets):
var asset = myouAndroidAPKFilePointerLength(cache_file.split("/", 1)[1])
var f = newString(asset.len)
copyMem(f.cstring.pointer, asset.p, asset.len)
let slices = f.decompress.toSliceMem.to(byte).deserialize
elif defined(ios) and myouAllCacheFilesExist:
let cache_file = myouGetBundledFilePath("assets/" & cache_file.split("/", 1)[1])
let slices = readFile(cache_file).decompress.toSliceMem.to(byte).deserialize
else:
let slices = readFile(cache_file).decompress.toSliceMem.to(byte).deserialize
var data = to[KtxInfoParts](slices[^1].toString)
for i,p in data.parts.mpairs:
p.data = slices[i].toPointer
tex.callback_compressed(data, slices)
return
except Exception as e:
echo e.getStackTrace()
echo getCurrentExceptionMsg()
# TODO: proper error handling and logging
echo cache_file
echo "ERROR: could not load cache file for " & tex.name
if not (native_bc or native_astc):
if will_encode_all:
@ -358,9 +346,8 @@ proc loadOptimized*(tex: Texture, slices: seq[SliceMem[byte]],
if settings.save_cache:
let dir = if info.is_bc: settings.cache_dir_bc
else: settings.cache_dir_astc
let cache_file = dir & "/" & cache_file_name
let outdata = refdata & @[($(%data)).toSliceMem.to(byte)]
writeFile cache_file, outdata.serialize.toOpenArray.compress
storeCachedURI tex.uri, outdata.serialize.toOpenArray.compress, dir
let channels = tex.format.channel_count
@ -424,7 +411,6 @@ proc loadOptimizedThreaded*(tex: Texture, slices: seq[SliceMem[byte]],
when compileOption("threads"):
var workers = newSeq[Thread[void]](myouEngineNumTextureThreads)
proc workerThreadProc() {.thread.} =
# TODO: handle errors
while true:
var to_decode = decode_chan.recv()
if to_decode.tex == nil:
@ -435,8 +421,15 @@ when compileOption("threads"):
compressed_return_chan.send((callback: to_decode.callback_compressed, tex: tex, data: data, refdata: refdata))
let cb_out = if to_decode.callback_uncompressed != nil: cb else: nil
let cbc_out = if to_decode.callback_compressed != nil: cbc else: nil
loadOptimized(move to_decode.tex, to_decode.slices, cb_out, cbc_out,
to_decode.flip, to_decode.min_channels)
let name = to_decode.tex.name
try:
loadOptimized(move to_decode.tex, to_decode.slices, cb_out, cbc_out,
to_decode.flip, to_decode.min_channels)
except Exception as e:
echo e.getStackTrace()
echo getCurrentExceptionMsg()
# TODO: proper error handling and logging
echo "ERROR: could not load texture " & name
decode_chan.open()
decode_return_chan.open()

View file

@ -138,7 +138,6 @@ proc initialize*(self: RenderManager) =
let zero = newArrRef[uint8](4)
zero.fill 0
self.blank_texture = self.engine.newTexture("",1,1,1,RGBA_u8,pixels=zero.to float32)
self.initialized = true
for fun in self.queue:
try:
fun()
@ -147,6 +146,7 @@ proc initialize*(self: RenderManager) =
for line in e.getStackTrace.split '\n':
echo line
echo getCurrentExceptionMsg()
self.initialized = true
self.queue.set_len 0
proc uninitialize*(self: RenderManager) =

View file

@ -62,7 +62,8 @@ proc generateMipmap*(self: Texture)
func to_sRGB*(format: TextureFormat): TextureFormat
proc newTexture*(engine: MyouEngine, name: string, data: SliceMem[byte],
is_sRGB: bool, filter: TextureFilter = Trilinear, depth=1,
flip = true, use_compression = true): Texture
flip = true, use_compression = true,
cache_key = ""): Texture
proc newTexture*(engine: MyouEngine, name: string, file_name: string, is_sRGB: bool,
filter: TextureFilter = Trilinear,
tex_type: TextureType = Tex2D,
@ -80,10 +81,10 @@ func toInternalFormat*(format: TextureFormat): GLenum
import std/bitops
import std/strformat
import loadable
import dds_ktx
import ../gpu_formats/texture_decode
import ../gpu_formats/texture_optimize
import ../gpu_formats/cache
# TODO: use and test destructor
@ -451,7 +452,8 @@ func to_sRGB*(format: TextureFormat): TextureFormat =
proc newTexture*(engine: MyouEngine, name: string, data: SliceMem[byte],
is_sRGB: bool, filter: TextureFilter = Trilinear, depth=1,
flip = true, use_compression = true): Texture =
flip = true, use_compression = true,
cache_key = ""): Texture =
var (width, height, format) = getDimensionsFormat(data.data, data.byte_len)
if is_sRGB:
if format in [RGB_u8, RGBA_u8]:
@ -461,9 +463,17 @@ proc newTexture*(engine: MyouEngine, name: string, data: SliceMem[byte],
echo &"WARNING: Texture {name} is sRGB but has format {format} _"
let self = engine.newTexture(name, width, height, depth, format, filter=filter)
self.is_sRGB = is_sRGB
self.uri = cache_key
engine.renderer.enqueue proc() =
if use_compression:
self.loadOptimizedThreaded(@[data], loadFromPixels, loadCompressedData)
if use_compression and cache_key != "":
let settings = self.engine.cache_settings
setCacheDir(settings.cache_dir)
getCachedFromMemory data, cache_key, "", use_cache = settings.use_cache, callback = proc(res: CacheResult) =
let data = if res.cached_data.len != 0:
res.cached_data
else:
res.orig_data
self.loadOptimizedThreaded(@[data], loadFromPixels, loadCompressedData)
else:
self.loadOptimizedThreaded(@[data], loadFromPixels, nil)
return self
@ -479,10 +489,22 @@ proc newTexture*(engine: MyouEngine, name: string, file_name: string, is_sRGB: b
# note: format does not matter at this point, will be replaced later
let self = engine.newTexture(name, 0, 0, filter=filter, format=RGBA_u8, tex_type=tex_type)
self.is_sRGB = is_sRGB
var res: LoadableResource
proc load(ok: bool, err: string, data: SliceMem[byte]) =
assert ok, &"Error loading texture '{self.name}' from '{file_name}': {err}"
let ktx_info = GetDdsKtxInfo(data.data, data.byte_len)
self.uri = file_name
let settings = self.engine.cache_settings
setCacheDir(settings.cache_dir)
getCachedURI self.uri, self.uri, "", use_cache = settings.use_cache, callback = proc(res: CacheResult) =
let ok = res.ok and max(res.orig_data.len, res.cached_data.len) >= 4
let err = if res.err == "": "Empty data" else: res.err
assert ok, &"Error loading texture '{self.name}' from '{self.uri}': {err}"
if use_compression and res.cached_data.len != 0:
# Got cached file, use it
self.ensure_storage()
self.loadOptimizedThreaded(@[res.cached_data], loadFromPixels, loadCompressedData)
return
let ktx_info = GetDdsKtxInfo(res.orig_data.data, res.orig_data.byte_len)
if ktx_info.isSome:
let ktx_info = ktx_info.get
assert ktx_info.is_cubemap == (self.tex_type == TexCube)
@ -497,8 +519,8 @@ proc newTexture*(engine: MyouEngine, name: string, file_name: string, is_sRGB: b
self.ensure_storage()
let info_parts = KtxInfoParts(
info: ktx_info,
parts: ParseDdsKtx(data.data, data.byte_len))
self.loadCompressedData(info_parts, @[data])
parts: ParseDdsKtx(res.orig_data.data, res.orig_data.byte_len))
self.loadCompressedData(info_parts, @[res.orig_data])
self.loaded = true
# except Exception as e:
except:
@ -508,7 +530,7 @@ proc newTexture*(engine: MyouEngine, name: string, file_name: string, is_sRGB: b
echo getCurrentExceptionMsg()
echo "Error loading texture " & file_name
else:
var (width, height, format) = getDimensionsFormat(data.data, data.byte_len, 3)
var (width, height, format) = getDimensionsFormat(res.orig_data.data, res.orig_data.byte_len, 3)
if is_sRGB:
if format in [RGB_u8, RGBA_u8]:
format = format.to_sRGB
@ -522,15 +544,13 @@ proc newTexture*(engine: MyouEngine, name: string, file_name: string, is_sRGB: b
try:
self.ensure_storage()
if use_compression:
self.loadOptimizedThreaded(@[data], loadFromPixels, loadCompressedData)
self.loadOptimizedThreaded(@[res.orig_data], loadFromPixels, loadCompressedData)
else:
self.loadOptimizedThreaded(@[data], loadFromPixels, nil)
self.loadOptimizedThreaded(@[res.orig_data], loadFromPixels, nil)
except:
# TODO: use logging
echo getCurrentExceptionMsg()
echo "Error loading texture " & file_name
res = file_name.loadUri(load, auto_start=false)
res.start()
return self
proc setExtrapolation*(self: Texture, ext: TextureExtrapolation) =

View file

@ -45,6 +45,7 @@ import std/strformat
import std/bitops
import std/options
import std/json
import std/os
import vmath except Quat, quat
import ../quat
import ../util
@ -109,11 +110,11 @@ proc loadAsync(self: BlendLoader, callback: proc(err: string)) =
else:
self.close()
self.resource = loadUri(self.blend_file_path,
proc(ok, err, data: auto) =
proc(res: auto) =
self.resource = nil
if ok:
self.blend_file = openBlendFile(self.blend_file_path, data.data, data.byte_len)
callback(err)
if res.ok:
self.blend_file = openBlendFile(self.blend_file_path, res.data.data, res.data.byte_len)
callback(res.err)
)
type BlenderObTypes = enum
@ -175,6 +176,8 @@ method loadTextureImpl*(self: BlendLoader, name: string, img: FNode): Texture =
let is_sRGB = color_space == "sRGB"
# echo img
if is_packed:
let blend_name = self.blend_file_path.extractFilename
let cache_key = blend_name & "/Images/" & name
# echo "loading image as PACKED file"
# todo: was this necessary?
# let seek = img.packedfile.seek.i32[0]
@ -182,7 +185,7 @@ method loadTextureImpl*(self: BlendLoader, name: string, img: FNode): Texture =
let arr = img.packedfile.data.get_array(size, byte)
# TODO: get SliceMems from blend file instead of UncheckedArray
let s = SliceMem[byte](data: arr, byte_len: size)
return self.engine.newTexture(name, s, is_sRGB)
return self.engine.newTexture(name, s, is_sRGB, cache_key = cache_key)
else:
# echo "loading image as REGULAR file"
if self.path_handler != nil:
@ -209,10 +212,6 @@ method loadTextureImpl*(self: BlendLoader, name: string, img: FNode): Texture =
assert false, &"Image source not supported yet: {source}, image '{name}'"
method loadTexture*(self: BlendLoader, name: string): Texture =
# sources:
# 1 image
# 4 generated
# 5 render
if name in self.textures:
return self.textures[name]
for img in self.blend_file.named_blocks["IM"]:

View file

@ -658,6 +658,7 @@ type
Texture* = ref object of RootObj
engine* {.cursor.}: MyouEngine
name*: string
uri*: string
storage*: TextureStorage ## private
loaded*: bool
last_used_shader* {.cursor.}: Shader ## private