I am still relatively new to doing this even in c, because i was used to using
vector in c++.
But since i am learning nim i don't know the way to correctly put a pointer
into a pre allocated pointer using cast in nim.
So here is what i'm working on.
import ../obj_parser, streams
type
BinWriter* = ref object of RootObj
obj: ref obj_data
proc readObjFile*(self:BinWriter,name:string) =
self.obj = getObjFile(name)
proc createBinary*(self:BinWriter,filename:string) =
var s = newFileStream(filename,fmwrite);
if s != nil:
var total : int = 0
total += (self.obj.vert.len * sizeof(float32))
total += (self.obj.face.len * sizeof(uint32))
total += (self.obj.nrml.len * sizeof(float32))
total += (self.obj.tex.len * sizeof(float32))
var all : pointer = nil
var vert = alloc(self.obj.vert.len * sizeof(float32))
for i in 0 .. <self.obj.vert.len:
vert = cast[pointer](cast[int](self.obj.vert) + i)
echo("sizeof vert: ",repr(vert))
var face = alloc(self.obj.face.len * sizeof(uint32))
for i in 0 .. <self.obj.face.len:
face = cast[pointer](cast[int](self.obj.face) + i)
echo("sizeof face: ",repr(face))
var nrml = alloc(self.obj.nrml.len * sizeof(float32))
for i in 0 .. <self.obj.nrml.len:
nrml = cast[pointer](cast[int](self.obj.nrml) + i)
echo("sizeof nrml: ",repr(nrml))
var tex = alloc(self.obj.tex.len * sizeof(float32))
for i in 0 .. <self.obj.tex.len:
tex = cast[pointer](cast[int](self.obj.tex) + i)
echo("sizeof tex: ",repr(tex))
#all = cast[all](all + vert + face + nrml + tex) dont know what to do
here???
dealloc(tex)
dealloc(nrml)
dealloc(face)
dealloc(vert)
#dealloc(all)
var binary = BinWriter()
binary.readObjFile("../u.obj")
binary.createBinary("u.bin")
so i checked around but saw no explanation or example of how to do this but
i'll keep looking.