Optimizer.hpp
//
// Optimizer.hpp
// MNN
//
// Created by MNN on 2019/08/20.
// Copyright © 2018, Alibaba Group Holding Limited
//
#ifndef Optimizer_hpp
#define Optimizer_hpp
#include <MNN/expr/Expr.hpp>
#include <MNN/MNNForwardType.h>
namespace MNN {
namespace Express {
class MNN_PUBLIC Optimizer {
public:
    // Device that the created optimizer should target.
    enum Device {
        CPU = 0,
        GPU = 1,
        OTHER = 2,
        AUTO = 3
    };
    struct Config {
        Device device = CPU;
        MNNForwardType forwardType = MNN_FORWARD_ALL;
        int numThread = 4;
    };
    static std::shared_ptr<Optimizer> create(Config config);

    // Estimated cost of computing a set of outputs.
    struct Cost {
        float compute; // MFlops
        float memory;  // MB
    };

    // A buffer of float parameters shared between onGetParameters, onMeasure
    // and onExecute.
    class Parameters {
    public:
        Parameters(int n);
        virtual ~Parameters();
        float* get() const {
            return mValue;
        }
        int size() const {
            return mSize;
        }

    private:
        float* mValue;
        int mSize;
    };
    virtual std::shared_ptr<Parameters> onGetParameters(const std::vector<VARP>& outputs) {
        return nullptr;
    }

    // Given the parameters, measure the cost of computing the outputs; the
    // parameters must be the ones returned by onGetParameters for the same outputs.
    virtual Cost onMeasure(const std::vector<VARP>& outputs, std::shared_ptr<Parameters> parameters = nullptr) = 0;

    // Modify the outputs in place; the parameters must be the ones returned by
    // onGetParameters for the same outputs.
    virtual bool onExecute(const std::vector<VARP>& outputs, std::shared_ptr<Parameters> parameters = nullptr) = 0;

    Optimizer() = default;
    virtual ~Optimizer() = default;
};
} // namespace Express
} // namespace MNN
#endif
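
Optimizer is an abstract interface: onMeasure and onExecute are pure virtual, so a concrete graph rewrite is supplied by a subclass. The sketch below is not part of the header; ExampleOptimizer, its trivial cost estimate, and the include path <MNN/expr/Optimizer.hpp> are assumptions for illustration only.

// Hypothetical subclass; names and trivial bodies are illustrative only.
#include <memory>
#include <vector>
#include <MNN/expr/Optimizer.hpp> // assumed install location of this header

using namespace MNN::Express;

class ExampleOptimizer : public Optimizer {
public:
    // Estimate the cost of computing `outputs`; a real optimizer would walk
    // the expression graph and sum per-op FLOPs and memory.
    virtual Cost onMeasure(const std::vector<VARP>& outputs,
                           std::shared_ptr<Parameters> parameters = nullptr) override {
        Cost cost;
        cost.compute = 0.0f; // MFlops
        cost.memory  = 0.0f; // MB
        return cost;
    }

    // Rewrite the graph that produces `outputs` in place; return false when
    // the optimization cannot be applied.
    virtual bool onExecute(const std::vector<VARP>& outputs,
                           std::shared_ptr<Parameters> parameters = nullptr) override {
        // A real optimizer would replace sub-graphs reachable from `outputs`
        // with cheaper equivalents here.
        return true;
    }
};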
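
Calling code obtains a concrete optimizer through Optimizer::create and drives it with onGetParameters, onMeasure, and onExecute. A minimal usage sketch; the runOptimizer helper, the CPU choice, and the include path <MNN/expr/Optimizer.hpp> are assumptions for illustration.

#include <memory>
#include <vector>
#include <MNN/expr/Optimizer.hpp> // assumed install location of this header

using namespace MNN::Express;

// Hypothetical driver: measure and then rewrite the graph producing `outputs`.
static void runOptimizer(const std::vector<VARP>& outputs) {
    Optimizer::Config config;
    config.device    = Optimizer::CPU;
    config.numThread = 4;
    auto optimizer = Optimizer::create(config);
    if (optimizer == nullptr) {
        return; // assume no optimizer is available for this configuration
    }
    // Parameters (if any) must come from onGetParameters for these outputs.
    auto parameters = optimizer->onGetParameters(outputs);
    if (parameters != nullptr) {
        float* values = parameters->get();
        int n         = parameters->size();
        // values[0..n) could be tuned before measuring / executing.
        (void)values;
        (void)n;
    }
    Optimizer::Cost cost = optimizer->onMeasure(outputs, parameters);
    // cost.compute is in MFlops, cost.memory in MB.
    (void)cost;
    optimizer->onExecute(outputs, parameters);
}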