chundoong-lab-ta/APWS23/project/tensor.h

#pragma once
#include <cstdlib>
#include <vector>
// You can modify the data structure as you want
struct Tensor {
  // Alloc memory for the given shape (at most 4 dimensions; see shape[4] below)
  Tensor(std::vector<int> shape_) {
    ndim = shape_.size();
    for (int i = 0; i < ndim; i++) {
      shape[i] = shape_[i];
    }
    int n = num_elem();
    buf = (float *)malloc(n * sizeof(float));
  }
  // Alloc memory and copy buf_ into it
  Tensor(std::vector<int> shape_, float *buf_) {
    ndim = shape_.size();
    for (int i = 0; i < ndim; i++) {
      shape[i] = shape_[i];
    }
    int n = num_elem();
    buf = (float *)malloc(n * sizeof(float));
    for (int i = 0; i < n; ++i) {
      buf[i] = buf_[i];
    }
  }
  ~Tensor() {
    if (buf != nullptr)
      free(buf);
  }
  // Set every element to zero (buf is already allocated by a constructor,
  // so re-allocating here would leak the existing buffer)
  void set_zero() {
    int n = num_elem();
    for (int i = 0; i < n; i++) {
      buf[i] = 0.0f;
    }
  }
  // Total number of elements, i.e., the product of all dimensions
  int num_elem() {
    int sz = 1;
    for (int i = 0; i < ndim; i++)
      sz *= shape[i];
    return sz;
  }
  // Pointer to data
  float *buf = nullptr;
  // Shape of the tensor, from outermost dimension to innermost dimension.
  // e.g., {{1.0, -0.5, 2.3}, {4.3, 5.6, -7.8}} => shape = {2, 3}
  int ndim = 0;
  int shape[4];
};
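
// A minimal usage sketch, added for illustration only; it is not part of the
// project, and the function name tensor_usage_example and the sample values
// are made up. It is fenced with #if 0 so including this header is unaffected.
#if 0
#include <cstdio>

inline void tensor_usage_example() {
  // The nested example {{1.0, -0.5, 2.3}, {4.3, 5.6, -7.8}} from the comment
  // above flattens to a row-major buffer with shape = {2, 3}.
  float data[6] = {1.0f, -0.5f, 2.3f, 4.3f, 5.6f, -7.8f};

  // "Alloc memory and copy": copies num_elem() = 2 * 3 = 6 floats from data.
  Tensor t({2, 3}, data);

  std::printf("elements: %d\n", t.num_elem()); // prints "elements: 6"

  // Overwrites every element with 0.0f in place.
  t.set_zero();
}
#endif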