-
Notifications
You must be signed in to change notification settings - Fork 1
/
GenerateModel_RandomForest.m
71 lines (61 loc) · 2.64 KB
/
GenerateModel_RandomForest.m
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
function [trainedClassifier, validationAccuracy] = GenerateModel_RandomForest(Learners_values, Labels, Model_parameters, partition_for_CV, Class_list, Learners_names)
% GenerateModel_RandomForest  Train a bagged-tree (random forest) classifier.
%
% Inputs:
%   Learners_values  - predictor data (observations x predictors) passed to
%                      fitcensemble.
%   Labels           - response labels, one per observation.
%   Model_parameters - cell array: {1} = max number of splits per tree,
%                      {2} = number of learning cycles (trees) in the ensemble.
%   partition_for_CV - cvpartition object used for cross-validation, or []
%                      to skip validation entirely.
%   Class_list       - class names; converted to categorical for fitcensemble.
%   Learners_names   - predictor names forwarded to fitcensemble.
%
% Outputs:
%   trainedClassifier  - struct with field ClassificationEnsemble containing
%                        the fitted ensemble model.
%   validationAccuracy - fraction of correctly classified observations under
%                        cross-validation (also printed as a percentage), or
%                        [] when no partition was supplied.

% Unpack the ensemble hyper-parameters from the cell array.
maxSplits = Model_parameters{1};
numCycles = Model_parameters{2};

% Weak learner: a decision tree capped at maxSplits splits.
treeTemplate = templateTree('MaxNumSplits', maxSplits);

% Bagging ('Method','Bag') an ensemble of trees yields a random forest.
ensembleModel = fitcensemble( ...
    Learners_values, ...
    Labels, ...
    'Method', 'Bag', ...
    'NumLearningCycles', numCycles, ...
    'Learners', treeTemplate, ...
    'ClassNames', categorical(Class_list), ...
    'PredictorNames', Learners_names);

trainedClassifier.ClassificationEnsemble = ensembleModel;

if isempty(partition_for_CV)
    % No partition supplied: caller asked for training only.
    validationAccuracy = [];
else
    % Cross-validate on the caller-supplied partition and report accuracy.
    cvModel = crossval(trainedClassifier.ClassificationEnsemble, 'CVpartition', partition_for_CV);
    validationAccuracy = 1 - kfoldLoss(cvModel, 'LossFun', 'ClassifError');
    disp(['Accuracy: ',num2str(100*validationAccuracy,'%.1f'),' %']);
end
end