learningclassificator.cpp
/*
* Dempster-Shafer Library for Evidence-Theory
* Thilo Michael, Jeffrey Jedele
* 2012
* > classificator, implementation
*/
#include "learningclassificator.hpp"
#include <stdlib.h>
#include <iostream>
using namespace std;
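
/*
 * Creates a classificator with the given learning rate (must lie in [0.0, 1.0])
 * and a fixed capacity of `size` feature slots. The running averages are kept
 * in a malloc'd array that is released by the destructor.
 */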
LearningClassificator::LearningClassificator(double learning_rate, int size) {
    if(learning_rate > 1.0 || learning_rate < 0.0) {
        // Learning rate must be between 0.0 and 1.0
        throw 1;
    }
    if(size < 1) {
        // Size must be >= 1
        throw 2;
    }
    this->learning_rate = learning_rate;
    this->current_index = 0;
    this->maximum_index = size-1;
    this->first_feature_average = (double*) malloc(size * sizeof(double));
}

LearningClassificator::~LearningClassificator() {
    free(first_feature_average);
}
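
/*
 * Registers a new feature with the given initial average and returns its
 * index. Throws once all `size` slots passed to the constructor are in use.
 */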
int LearningClassificator::add_feature(double initial_average) {
    if(current_index > maximum_index) {
        // Classificator can't hold more features.
        throw 1;
    }
    // use first_feature_average as the base pointer and current_index to address the next free slot
    *(first_feature_average + current_index) = initial_average;
    return current_index++;
}
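
/*
 * Classifies `value` against the running average of `feature` and returns a
 * score in [-1.0, 1.0]: positive when the value is above the average,
 * negative when below, 0.0 when equal. The average is then updated with an
 * exponential moving average. Worked example (numbers assumed for
 * illustration): with learning_rate = 0.1, average = 50.0 and value = 60.0,
 * the score is -1.0 + 60/50 = 0.2 and the new average is
 * 0.1*60 + 0.9*50 = 51.0. The ratio-based score assumes positive averages
 * and values.
 */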
double LearningClassificator::classify(int feature, double value) {
    if(feature < 0 || feature >= current_index) {
        // feature does not exist (only indices returned by add_feature are valid)
        throw 1;
    }
    double avg = *(first_feature_average + feature);
    double classification = 0.0;
    if(value > avg) {
        // classification is positive if the classified value is larger than the average
        classification = -1.0 + value/avg;
        classification = (classification > 1.0) ? 1.0 : classification; // cap at 1.0
    } else {
        // classification is negative if the classified value is smaller than the average, 0 if both are equal
        classification = 1.0 - avg/value;
        classification = (classification < -1.0) ? -1.0 : classification; // cap at -1.0
    }
    // adjust the running average for the feature (exponential moving average update)
    *(first_feature_average + feature) = learning_rate * value + (1-learning_rate) * avg;
    return classification;
}
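
/*
 * Minimal usage sketch (illustrative only; the feature names and sample
 * values below are assumed, not part of the library). Compile this file with
 * -DLEARNINGCLASSIFICATOR_EXAMPLE to build it as a standalone program.
 */
#ifdef LEARNINGCLASSIFICATOR_EXAMPLE
int main() {
    LearningClassificator classificator(0.1, 2); // learning rate 0.1, two feature slots
    int speed = classificator.add_feature(50.0); // running average starts at 50
    int noise = classificator.add_feature(10.0); // running average starts at 10

    // 60 is above the current speed average of 50 -> positive score (0.2)
    cout << "speed score: " << classificator.classify(speed, 60.0) << endl;
    // 5 is below the current noise average of 10 -> negative score (-1.0)
    cout << "noise score: " << classificator.classify(noise, 5.0) << endl;
    return 0;
}
#endif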