-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathevaluators.hpp
133 lines (106 loc) · 4.02 KB
/
evaluators.hpp
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 3 of the License, or
* (at your option) any later version.
*
* Written (W) 2015 Kostiantyn Antoniuk
* Copyright (C) 2015 Kostiantyn Antoniuk
*/
#ifndef evaluators_hpp
#define evaluators_hpp
#include <memory>
#include <vector>

#include "oracle/mord_regularized.h"
#include "oracle/ordinal_regression.h"
#include "oracle/pw_vilma_regularized.h"
namespace VilmaEvaluators {
// Dense double vector used throughout the evaluators; `using` alias
// (the file already relies on C++11 features such as `override`).
using DenseVecD = Vilma::DenseVector<double>;
/// Abstract interface for scoring a trained model on a data set.
///
/// Concrete evaluators compute the mean loss of the model encoded by a
/// flat parameter vector over all examples in @p data.
class ModelEvaluator {
 public:
  // Instances are deleted through this interface, hence the virtual dtor.
  virtual ~ModelEvaluator() noexcept(true) = default;

  /// Returns the mean error of the model @p params on @p data.
  virtual double Evaluate(Data *data, double *params) const = 0;
};
template <class Loss>
class OrdModelEvaluator : public ModelEvaluator {
public:
OrdModelEvaluator() = default;
virtual ~OrdModelEvaluator() noexcept(true) = default;
virtual double Evaluate(Data *data, double *params) const {
const int num_examples = data->GetDataNumExamples();
const int dim_x = data->GetDataDim();
Loss loss;
int error = 0;
DenseVecD theta(data->GetDataNumClasses() - 1, params + dim_x);
for (int ex = 0; ex < num_examples; ++ex) {
const Vilma::SparseVector<double> &x = *data->x->GetRow(ex);
const int y = data->y->data_[ex];
const double wx = x.dot<DenseVecD>(DenseVecD(dim_x, params));
int pred_y = VilmaOracle::OrdinalRegression::SingleExampleBestLabelLookup(
wx, theta, data->GetDataNumClasses());
error += loss(y, pred_y);
}
return 1. * error / num_examples;
}
};
template <class Loss>
class MOrdModelEvaluator : public ModelEvaluator {
public:
MOrdModelEvaluator() = default;
virtual ~MOrdModelEvaluator() noexcept(true) = default;
double Evaluate(Data *data, double *params) const override {
const int num_examples = data->GetDataNumExamples();
const int dim_x = data->GetDataDim();
Loss loss;
int error = 0;
DenseVecD beta(data->GetDataNumClasses(), params + dim_x);
for (int ex = 0; ex < num_examples; ++ex) {
const Vilma::SparseVector<double> &x = *data->x->GetRow(ex);
const int y = data->y->data_[ex];
const double wx = x.dot<DenseVecD>(DenseVecD(dim_x, params));
auto ret =
VilmaOracle::MOrdRegularized<Loss>::SingleExampleBestLabelLookup(
wx, beta, 0, data->GetDataNumClasses() - 1, -1, &loss);
int pred_y = std::get<1>(ret);
error += loss(y, pred_y);
}
return 1. * error / num_examples;
}
};
/// Evaluator for the piecewise-linear MOrd model (PW-VILMA): kPW weight
/// vectors (one per piece defined by cut_labels) followed by per-class
/// offsets beta in the flat params.
template <class Loss>
class PwMOrdModelEvaluator : public ModelEvaluator {
 public:
  // Cut labels are mandatory; there is no sensible default.
  PwMOrdModelEvaluator() = delete;
  virtual ~PwMOrdModelEvaluator() noexcept(true) = default;

  /// @param cut_labels labels at which the piecewise score is anchored.
  PwMOrdModelEvaluator(const std::vector<int> &cut_labels)
      : cut_labels_(cut_labels) {}

  /// Returns the mean Loss of predicted vs. ground-truth labels.
  /// @param data   examples and labels.
  /// @param params flat parameters: [w_1..w_kPW (dim_x each) | beta].
  double Evaluate(Data *data, double *params) const override {
    const int num_examples = data->GetDataNumExamples();
    const int dim_x = data->GetDataDim();
    const int ny = data->GetDataNumClasses();
    Loss loss;
    int error = 0;
    const int kPW = static_cast<int>(cut_labels_.size());
    // FIX: BuildAlphas returns a heap buffer that the original never
    // freed — a leak on every Evaluate call. Own it so it is released on
    // all exit paths.
    // NOTE(review): assumes BuildAlphas allocates with new[] — confirm.
    std::unique_ptr<double[]> alpha_buffer(
        VilmaOracle::PwVilmaRegularized<Loss>::BuildAlphas(cut_labels_, ny));
    // Precomputed scores <w_j, x_i> for every example i and piece j.
    std::vector<double> wx(num_examples * kPW);
    DenseVecD weights(dim_x * kPW, params);
    VilmaOracle::PwVilmaRegularized<Loss>::ProjectData(weights, data, &wx[0],
                                                      kPW);
    // Per-class offsets follow the kPW weight vectors in the flat buffer.
    const double *beta = params + dim_x * kPW;
    for (int example_idx = 0; example_idx < num_examples; ++example_idx) {
      const int gt_y = data->y->operator[](example_idx);
      const auto subproblem_res =
          VilmaOracle::PwVilmaRegularized<Loss>::SingleExampleBestLabelLookup(
              &wx[0] + example_idx * kPW, alpha_buffer.get(), beta, 0, ny - 1,
              -1, kPW, nullptr);
      const int &best_y = std::get<1>(subproblem_res);
      error += loss(gt_y, best_y);
    }
    return 1. * error / num_examples;
  }

 protected:
  std::vector<int> cut_labels_;  // piece anchor labels, fixed at construction
};
} // namespace
#endif /* evaluators_hpp */