Description
I'm having trouble running mnist.cpp; I have already copied the data folder into the build directory manually. At runtime it raises this error:
```
terminate called after throwing an instance of 'c10::Error'
  what(): dim() called on undefined Tensor
Exception raised from dim at /pytorch/c10/core/UndefinedTensorImpl.cpp:24 (most recent call first):
frame #0: c10::Error::Error(c10::SourceLocation, std::string) + 0x42 (0x7f943aef28c2 in /home/khangt1k25/libtorch/lib/libc10.so)
frame #1: c10::UndefinedTensorImpl::dim() const + 0x325 (0x7f943aedfe75 in /home/khangt1k25/libtorch/lib/libc10.so)
frame #2: at::native::size(at::Tensor const&, long) + 0x23 (0x7f942b012073 in /home/khangt1k25/libtorch/lib/libtorch_cpu.so)
frame #3: at::Tensor::size(long) const + 0xed (0x7f942b6b541d in /home/khangt1k25/libtorch/lib/libtorch_cpu.so)
frame #4: torch::data::datasets::MNIST::size() const + 0xf (0x7f942d56cb7f in /home/khangt1k25/libtorch/lib/libtorch_cpu.so)
frame #5: torch::data::datasets::MapDataset<torch::data::datasets::MNIST, torch::data::transforms::Normalize<at::Tensor> >::size() const + 0x1d (0x56324d6c25ad in ./mnist)
frame #6: torch::data::datasets::MapDataset<torch::data::datasets::MapDataset<torch::data::datasets::MNIST, torch::data::transforms::Normalize<at::Tensor> >, torch::data::transforms::Stack<torch::data::Example<at::Tensor, at::Tensor> > >::size() const + 0x1d (0x56324d6bdc69 in ./mnist)
frame #7: main + 0x531 (0x56324d6b0068 in ./mnist)
frame #8: __libc_start_main + 0xe7 (0x7f9429438bf7 in /lib/x86_64-linux-gnu/libc.so.6)
frame #9: _start + 0x2a (0x56324d6af4ba in ./mnist)
Aborted (core dumped)
```
and here is the relevant part of my mnist.cpp:
```cpp
torch::manual_seed(16);
Net model;

auto train_dataset = torch::data::datasets::MNIST("data")
                         .map(torch::data::transforms::Normalize<>(0.1307, 0.3081))
                         .map(torch::data::transforms::Stack<>());
auto test_dataset = torch::data::datasets::MNIST("data", torch::data::datasets::MNIST::Mode::kTest)
                        .map(torch::data::transforms::Normalize<>(0.1307, 0.3081))
                        .map(torch::data::transforms::Stack<>());

auto train_loader = torch::data::make_data_loader<torch::data::samplers::SequentialSampler>(
    std::move(train_dataset), kTrainBatchSize);
auto test_loader = torch::data::make_data_loader(std::move(test_dataset), kTestBatchSize);

torch::optim::SGD optimizer(
    model.parameters(), torch::optim::SGDOptions(0.01).momentum(0.5));

const size_t test_dataset_size = test_dataset.size().value();

for (size_t epoch = 1; epoch <= kNumberOfEpochs; ++epoch) {
  train(model, *train_loader, optimizer, epoch);
  test(model, *test_loader, test_dataset_size);
}
return 0;
```
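One thing I noticed but have not confirmed is the cause: test_dataset is std::move'd into make_data_loader one line before test_dataset.size() is called, and the stack trace does point at MNIST::size() hitting an undefined Tensor. Below is a minimal sketch of the same main() with the size read before the move; Net, train(), test(), kTrainBatchSize, kTestBatchSize, and kNumberOfEpochs are assumed to be the same as in the official mnist example.

```cpp
#include <torch/torch.h>

// Sketch only: Net, train(), test(), kTrainBatchSize, kTestBatchSize and
// kNumberOfEpochs are assumed to come from the official mnist example.
int main() {
  torch::manual_seed(16);
  Net model;

  auto train_dataset = torch::data::datasets::MNIST("data")
                           .map(torch::data::transforms::Normalize<>(0.1307, 0.3081))
                           .map(torch::data::transforms::Stack<>());
  auto test_dataset = torch::data::datasets::MNIST("data", torch::data::datasets::MNIST::Mode::kTest)
                          .map(torch::data::transforms::Normalize<>(0.1307, 0.3081))
                          .map(torch::data::transforms::Stack<>());

  // Read the dataset size while test_dataset is still valid,
  // i.e. before it is moved into the data loader below.
  const size_t test_dataset_size = test_dataset.size().value();

  auto train_loader = torch::data::make_data_loader<torch::data::samplers::SequentialSampler>(
      std::move(train_dataset), kTrainBatchSize);
  auto test_loader = torch::data::make_data_loader(std::move(test_dataset), kTestBatchSize);

  torch::optim::SGD optimizer(
      model.parameters(), torch::optim::SGDOptions(0.01).momentum(0.5));

  for (size_t epoch = 1; epoch <= kNumberOfEpochs; ++epoch) {
    train(model, *train_loader, optimizer, epoch);
    test(model, *test_loader, test_dataset_size);
  }
  return 0;
}
```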
Can anyone help me with this? Thanks!