torch-in-action icon indicating copy to clipboard operation
torch-in-action copied to clipboard

Chapter 2 error

Open nicholas-leonard opened this issue 8 years ago • 0 comments

The code below caused a core dump :

The code below caused a core dump :
root@3f440e07-6944-4ec4-dd75-96d990d739c4:~/mnist# cat loadit.lua
require 'paths'
datapath = paths.cwd()
print(datapath)
testlabel = "train-labels-idx1-ubyte"
labelpath = paths.concat(datapath, testlabel)
assert(paths.filep(labelpath), "File does not exist: "..labelpath)
file = io.open(labelpath, "r")
data = file:read("*a")
file:close()
print("No characters: ", #data)
labels = data:sub(-60000, -1)
print("No labels: " .. #labels)
targets = torch.LongTensor(#labels):fill(-1)
print(targets:size())
for i=1, #labels do
        class = labels:byte(i)
--      print(type(class), class)
        targets[i] = class
end
targets:add(1)
assert(targets:max() == 10 and targets:min() == 1)
torch.save(paths.concat(datapath, "traintargets.t7"), targets)
----------- Load up the images --------
testimage = "train-images-idx3-ubyte"
imagepath = paths.concat(datapath, testimage)
file = io.open(imagepath)
data = file:read("*a")
--file:close()
print("No of bytes: ", #data)
images = data:sub(16 + 1, -1)
print(#images, #images / 28 * 28)
--inputs = torch.FloatTensor(#labels, 1, 28, 28)
--for i = 1, #labels do
--      for j = 1, 28 do
--              for k = 1, 28 do
--                      idx = (i - 1) * 28 * 28 + (j - 1) * 28 + k
--                      inputs[{i, 1, j, k}] = images:byte(idx)
--              end
--      end
--end
--
inputs = torch.FloatTensor(#labels, 1, 28, 28)
storage = inputs:storage()
for idx = 1, #images do
        storage[idx] = images:byte(idx)
end
ffi = require 'ffi'
inputs = torch.ByteTensor(#labels, 1, 28, 28)
idata = inputs:data()
ffi.copy(idata, images)
print("lets crash")
inputs = inputs:float()
print("worked")
assert(targets:max() <= 255 and targets:min() >= 0)
torch.save(paths.concat(datapath, "traininputs.t7"), inputs)

nicholas-leonard avatar Dec 08 '16 04:12 nicholas-leonard