diff --git a/src/gpu_formats/texture_decode.nim b/src/gpu_formats/texture_decode.nim
index 183aa53..ef81b31 100644
--- a/src/gpu_formats/texture_decode.nim
+++ b/src/gpu_formats/texture_decode.nim
@@ -142,6 +142,10 @@ proc loadFileFromSlices*(tex: Texture, slices: seq[SliceMem[byte]],
       tex.format
     else:
       tex.format.resize(min_channels)
+  # TODO: Don't do this!!
+  # Change the format when the decoder has detected less channels!!
+  let min_channels = format.channel_count
+
   let layer_stride = tex.width * tex.height * format.stride
   var multilayer_buffer: ArrRef[byte]
   assert tex.depth == slices.len
@@ -197,8 +201,10 @@ proc loadFileFromSlices*(tex: Texture, slices: seq[SliceMem[byte]],
         image = loadFromMemory(slice.toOpenArrayByte, w,h,c, min_channels)
         pixels_ptr = image.data
         pixels_len = image.len
-      assert layer_stride == pixels_len,
-        &"Image '{tex.name}' has a length of {pixels_len}, expected {layer_stride}"
+      if layer_stride != pixels_len:
+        echo "Format: ", format
+        raise Defect.newException &"Image '{tex.name}' has a length" &
+          &" of {pixels_len}, expected {layer_stride}"
       if flip:
         swap_lines(pixels_ptr, tex.width * format.stride, tex.height)
       if tex.depth == 1:
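
Note on the second hunk: the patch replaces a bare `assert` with an explicit `if` check that logs the format and raises a `Defect`. The practical difference is that `assert` is compiled out when assertions are disabled (e.g. under `-d:danger` or `--assertions:off`), so the size mismatch would go undetected in release builds, while the explicit branch always runs. A minimal sketch of the same pattern is below; the helper name `checkLayerStride` and its parameters are illustrative only and not part of the patch, but the `Defect.newException` call and message format mirror the diff.

    import std/strformat

    # Hypothetical helper (not from the patch): an explicit size check that
    # survives release builds, unlike `assert`, which is stripped when
    # assertions are disabled.
    proc checkLayerStride(name: string; expected, actual: int) =
      if expected != actual:
        raise Defect.newException(
          &"Image '{name}' has a length of {actual}, expected {expected}")

    # Passes silently when the decoded buffer matches the computed stride.
    checkLayerStride("example.png", 64 * 64 * 4, 64 * 64 * 4)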