diff --git a/AnomalyDetection/LibLinear/src/main/java/org/tribuo/anomaly/liblinear/LibLinearAnomalyTrainer.java b/AnomalyDetection/LibLinear/src/main/java/org/tribuo/anomaly/liblinear/LibLinearAnomalyTrainer.java
index 7efdd71da..e10ac708b 100644
--- a/AnomalyDetection/LibLinear/src/main/java/org/tribuo/anomaly/liblinear/LibLinearAnomalyTrainer.java
+++ b/AnomalyDetection/LibLinear/src/main/java/org/tribuo/anomaly/liblinear/LibLinearAnomalyTrainer.java
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2021, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2021, 2022, Oracle and/or its affiliates. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -42,10 +42,6 @@
/**
* A {@link Trainer} which wraps a liblinear-java anomaly detection trainer using a one-class SVM.
*
- * Note the train method is synchronized on {@code LibLinearTrainer.class} due to a global RNG in liblinear-java.
- * This is insufficient to ensure reproducibility if liblinear-java is used directly in the same JVM as Tribuo, but
- * avoids locking on classes Tribuo does not control.
- *
* See:
*
* Fan RE, Chang KW, Hsieh CJ, Wang XR, Lin CJ.
@@ -94,6 +90,8 @@ public LibLinearAnomalyTrainer(LinearAnomalyType trainerType, double cost, doubl
/**
* Creates a trainer for a LibLinear model
+ *
+ * Uses {@link Trainer#DEFAULT_SEED} as the RNG seed.
* @param trainerType Loss function and optimisation method combination.
* @param cost Cost penalty for each incorrectly classified training point.
* @param maxIterations The maximum number of dataset iterations.
@@ -101,7 +99,20 @@ public LibLinearAnomalyTrainer(LinearAnomalyType trainerType, double cost, doubl
* @param nu The nu parameter in the one-class SVM.
*/
public LibLinearAnomalyTrainer(LinearAnomalyType trainerType, double cost, int maxIterations, double terminationCriterion, double nu) {
- super(trainerType,cost,maxIterations,terminationCriterion);
+ this(trainerType,cost,maxIterations,terminationCriterion,nu,Trainer.DEFAULT_SEED);
+ }
+
+ /**
+ * Creates a trainer for a LibLinear model
+ * @param trainerType Loss function and optimisation method combination.
+ * @param cost Cost penalty for each incorrectly classified training point.
+ * @param maxIterations The maximum number of dataset iterations.
+ * @param terminationCriterion How close does the optimisation function need to be before terminating that subproblem (usually set to 0.1).
+ * @param nu The nu parameter in the one-class SVM.
+ * @param seed The RNG seed.
+ */
+ public LibLinearAnomalyTrainer(LinearAnomalyType trainerType, double cost, int maxIterations, double terminationCriterion, double nu, long seed) {
+ super(trainerType,cost,maxIterations,terminationCriterion,seed);
this.nu = nu;
}
@@ -119,7 +130,7 @@ public void postConfig() {
@Override
protected Parameter setupParameters(ImmutableOutputInfo<Event> labelIDMap) {
libLinearParams.setNu(nu);
- return libLinearParams;
+ return libLinearParams.clone();
}
@Override
@@ -131,9 +142,6 @@ protected List<de.bwaldvogel.liblinear.Model> trainModels(Parameter curParams, int numFeatures, FeatureN
data.x = features;
data.n = numFeatures;
- // Note this isn't sufficient for reproducibility as it doesn't cope with concurrency.
- // Concurrency safety is handled by the global lock on LibLinearTrainer.class in LibLinearTrainer.train.
- Linear.resetRandom();
return Collections.singletonList(Linear.train(data,curParams));
}
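// --- Editor's sketch (illustrative, not part of the diff) ---
// With the new seed parameter, two trainers constructed identically and invoked the
// same number of times now produce identical models. `trainData` is a hypothetical
// Dataset<Event>; ONECLASS_SVM is assumed to be the one-class solver enum constant.
LibLinearAnomalyTrainer trainer = new LibLinearAnomalyTrainer(
        new LinearAnomalyType(LinearAnomalyType.LinearType.ONECLASS_SVM),
        1.0,     // cost
        1000,    // maxIterations
        0.1,     // terminationCriterion
        0.45,    // nu
        12345L); // RNG seed, the new constructor argument
Model<Event> model = trainer.train(trainData);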
diff --git a/Classification/LibLinear/src/main/java/org/tribuo/classification/liblinear/LibLinearClassificationTrainer.java b/Classification/LibLinear/src/main/java/org/tribuo/classification/liblinear/LibLinearClassificationTrainer.java
index 0940a1817..1c10e96cd 100644
--- a/Classification/LibLinear/src/main/java/org/tribuo/classification/liblinear/LibLinearClassificationTrainer.java
+++ b/Classification/LibLinear/src/main/java/org/tribuo/classification/liblinear/LibLinearClassificationTrainer.java
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2015-2021, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2015, 2022, Oracle and/or its affiliates. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -45,10 +45,6 @@
/**
* A {@link Trainer} which wraps a liblinear-java classifier trainer.
*
- * Note the train method is synchronized on {@code LibLinearTrainer.class} due to a global RNG in liblinear-java.
- * This is insufficient to ensure reproducibility if liblinear-java is used directly in the same JVM as Tribuo, but
- * avoids locking on classes Tribuo does not control.
- *
* See:
*
* Fan RE, Chang KW, Hsieh CJ, Wang XR, Lin CJ.
@@ -70,14 +66,16 @@ public class LibLinearClassificationTrainer extends LibLinearTrainer<Label> impl
private Map<String,Float> labelWeights = Collections.emptyMap();
/**
- * Creates a trainer using the default values (L2R_L2LOSS_SVC_DUAL, 1, 0.1).
+ * Creates a trainer using the default values ({@link LinearType#L2R_L2LOSS_SVC_DUAL}, 1, 1000, 0.1, {@link Trainer#DEFAULT_SEED}).
*/
public LibLinearClassificationTrainer() {
this(new LinearClassificationType(LinearType.L2R_L2LOSS_SVC_DUAL),1,1000,0.1);
}
/**
- * Creates a trainer for a LibLinearClassificationModel. Sets maxIterations to 1000.
+ * Creates a trainer for a LibLinearClassificationModel.
+ *
+ * Uses {@link Trainer#DEFAULT_SEED} as the RNG seed. Sets maxIterations to 1000.
* @param trainerType Loss function and optimisation method combination.
* @param cost Cost penalty for each incorrectly classified training point.
* @param terminationCriterion How close does the optimisation function need to be before terminating that subproblem (usually set to 0.1).
@@ -88,13 +86,27 @@ public LibLinearClassificationTrainer(LinearClassificationType trainerType, doub
/**
* Creates a trainer for a LibLinear model
+ *
+ * Uses {@link Trainer#DEFAULT_SEED} as the RNG seed.
* @param trainerType Loss function and optimisation method combination.
* @param cost Cost penalty for each incorrectly classified training point.
* @param maxIterations The maximum number of dataset iterations.
* @param terminationCriterion How close does the optimisation function need to be before terminating that subproblem (usually set to 0.1).
*/
public LibLinearClassificationTrainer(LinearClassificationType trainerType, double cost, int maxIterations, double terminationCriterion) {
- super(trainerType,cost,maxIterations,terminationCriterion);
+ this(trainerType,cost,maxIterations,terminationCriterion,Trainer.DEFAULT_SEED);
+ }
+
+ /**
+ * Creates a trainer for a LibLinear model
+ * @param trainerType Loss function and optimisation method combination.
+ * @param cost Cost penalty for each incorrectly classified training point.
+ * @param maxIterations The maximum number of dataset iterations.
+ * @param terminationCriterion How close does the optimisation function need to be before terminating that subproblem (usually set to 0.1).
+ * @param seed The RNG seed.
+ */
+ public LibLinearClassificationTrainer(LinearClassificationType trainerType, double cost, int maxIterations, double terminationCriterion, long seed) {
+ super(trainerType,cost,maxIterations,terminationCriterion,seed);
}
/**
@@ -118,9 +130,6 @@ protected List<de.bwaldvogel.liblinear.Model> trainModels(Parameter curParams, int numFeatures, FeatureN
data.n = numFeatures;
data.bias = 1.0;
- // Note this isn't sufficient for reproducibility as it doesn't cope with concurrency.
- // Concurrency safety is handled by the global lock on LibLinearTrainer.class in LibLinearTrainer.train.
- Linear.resetRandom();
return Collections.singletonList(Linear.train(data,curParams));
}
@@ -148,9 +157,8 @@ protected Pair<FeatureNode[][],double[][]> extractData(Dataset<Label> data, Imm
@Override
protected Parameter setupParameters(ImmutableOutputInfo<Label> labelIDMap) {
- Parameter curParams;
+ Parameter curParams = libLinearParams.clone();
if (!labelWeights.isEmpty()) {
- curParams = new Parameter(libLinearParams.getSolverType(),libLinearParams.getC(),libLinearParams.getEps());
double[] weights = new double[labelIDMap.size()];
int[] indices = new int[labelIDMap.size()];
int i = 0;
@@ -167,8 +175,6 @@ protected Parameter setupParameters(ImmutableOutputInfo labelIDMap) {
}
curParams.setWeights(weights,indices);
//logger.info("Weights = " + Arrays.toString(weights) + ", labels = " + Arrays.toString(indices) + ", outputIDInfo = " + outputIDInfo);
- } else {
- curParams = libLinearParams;
}
return curParams;
}
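// --- Editor's sketch (illustrative, not part of the diff) ---
// setupParameters now always returns a clone, so per-label weights are folded into
// the per-call Parameter and the trainer's own libLinearParams stay untouched between
// train calls. The label name is hypothetical.
LibLinearClassificationTrainer trainer = new LibLinearClassificationTrainer(
        new LinearClassificationType(LinearType.L2R_L2LOSS_SVC_DUAL),
        1.0, 1000, 0.1, 42L);
Map<Label,Float> weights = new HashMap<>();
weights.put(new Label("positive"), 2.0f); // up-weight a rare class
trainer.setLabelWeights(weights);         // applied to the cloned Parameter at train time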
diff --git a/Classification/LibLinear/src/test/java/org/tribuo/classification/liblinear/TestLibLinearModel.java b/Classification/LibLinear/src/test/java/org/tribuo/classification/liblinear/TestLibLinearModel.java
index 44a2d3f1e..076329bb0 100644
--- a/Classification/LibLinear/src/test/java/org/tribuo/classification/liblinear/TestLibLinearModel.java
+++ b/Classification/LibLinear/src/test/java/org/tribuo/classification/liblinear/TestLibLinearModel.java
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2015-2021, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2015, 2022, Oracle and/or its affiliates. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -64,6 +64,7 @@
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
@@ -187,15 +188,21 @@ public Model<Label> testLibLinear(Pair<Dataset<Label>,Dataset<Label>> p) {
@Test
public void testReproducible() {
- // Note this test will need to change if LibLinearTrainer grows a per Problem RNG.
Pair<Dataset<Label>,Dataset<Label>> p = LabelledDataGenerator.denseTrainTest();
+ t.setInvocationCount(0);
Model<Label> m = t.train(p.getA());
Map<String,List<Pair<String,Double>>> mFeatures = m.getTopFeatures(-1);
+ t.setInvocationCount(0);
Model<Label> mTwo = t.train(p.getA());
Map<String,List<Pair<String,Double>>> mTwoFeatures = mTwo.getTopFeatures(-1);
assertEquals(mFeatures,mTwoFeatures);
+
+ Model<Label> mThree = t.train(p.getA());
+ Map<String,List<Pair<String,Double>>> mThreeFeatures = mThree.getTopFeatures(-1);
+
+ assertNotEquals(mFeatures, mThreeFeatures);
}
@Test
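// --- Editor's sketch (illustrative, not part of the diff) ---
// The same guarantee the test above checks, expressed via the overload that takes an
// explicit invocation count instead of calling setInvocationCount between runs: both
// trainings start from invocation 0 of the seed's split sequence.
Model<Label> first = t.train(p.getA(), Collections.emptyMap(), 0);
Model<Label> second = t.train(p.getA(), Collections.emptyMap(), 0);
assertEquals(first.getTopFeatures(-1), second.getTopFeatures(-1));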
diff --git a/Common/LibLinear/src/main/java/org/tribuo/common/liblinear/LibLinearTrainer.java b/Common/LibLinear/src/main/java/org/tribuo/common/liblinear/LibLinearTrainer.java
index 0a5bc3bc2..c6ab652c4 100644
--- a/Common/LibLinear/src/main/java/org/tribuo/common/liblinear/LibLinearTrainer.java
+++ b/Common/LibLinear/src/main/java/org/tribuo/common/liblinear/LibLinearTrainer.java
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2015-2021, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2015, 2022, Oracle and/or its affiliates. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -39,15 +39,13 @@
import java.util.Collections;
import java.util.List;
import java.util.Map;
+import java.util.Random;
+import java.util.SplittableRandom;
import java.util.logging.Logger;
/**
* A {@link Trainer} which wraps a liblinear-java trainer.
*
- * Note the train method is synchronized on {@code LibLinearTrainer.class} due to a global RNG in liblinear-java.
- * This is insufficient to ensure reproducibility if liblinear-java is used directly in the same JVM as Tribuo, but
- * avoids locking on classes Tribuo does not control.
- *
* See:
*
* Fan RE, Chang KW, Hsieh CJ, Wang XR, Lin CJ.
@@ -82,12 +80,22 @@ public abstract class LibLinearTrainer<T extends Output<T>> implements Trainer<T>
+ * Uses {@link Trainer#DEFAULT_SEED} as the RNG seed, and 0.1 as epsilon.
* @param trainerType Loss function and optimisation method combination.
* @param cost Cost penalty for each incorrectly classified training point.
* @param maxIterations The maximum number of dataset iterations.
@@ -103,14 +111,41 @@ protected LibLinearTrainer(LibLinearType<T> trainerType, double cost, int maxIte
* @param cost Cost penalty for each incorrectly classified training point.
* @param maxIterations The maximum number of dataset iterations.
* @param terminationCriterion How close does the optimisation function need to be before terminating that subproblem (usually set to 0.1).
+ * @param seed The RNG seed.
+ */
+ protected LibLinearTrainer(LibLinearType<T> trainerType, double cost, int maxIterations, double terminationCriterion, long seed) {
+ this(trainerType,cost,maxIterations,terminationCriterion,0.1, seed);
+ }
+
+ /**
+ * Creates a trainer for a LibLinear model
+ *
+ * Uses {@link Trainer#DEFAULT_SEED} as the RNG seed.
+ * @param trainerType Loss function and optimisation method combination.
+ * @param cost Cost penalty for each incorrectly classified training point.
+ * @param maxIterations The maximum number of dataset iterations.
+ * @param terminationCriterion How close does the optimisation function need to be before terminating that subproblem (usually set to 0.1).
* @param epsilon The insensitivity of the regression loss to small differences.
*/
protected LibLinearTrainer(LibLinearType<T> trainerType, double cost, int maxIterations, double terminationCriterion, double epsilon) {
+ this(trainerType,cost,maxIterations,terminationCriterion,epsilon,Trainer.DEFAULT_SEED);
+ }
+
+ /**
+ * Creates a trainer for a LibLinear model
+ * @param trainerType Loss function and optimisation method combination.
+ * @param cost Cost penalty for each incorrectly classified training point.
+ * @param maxIterations The maximum number of dataset iterations.
+ * @param terminationCriterion How close does the optimisation function need to be before terminating that subproblem (usually set to 0.1).
+ * @param epsilon The insensitivity of the regression loss to small differences.
+ * @param seed The RNG seed.
+ */
+ protected LibLinearTrainer(LibLinearType<T> trainerType, double cost, int maxIterations, double terminationCriterion, double epsilon, long seed) {
this.trainerType = trainerType;
this.cost = cost;
this.maxIterations = maxIterations;
this.terminationCriterion = terminationCriterion;
this.epsilon = epsilon;
+ this.seed = seed;
postConfig();
}
@@ -120,6 +155,7 @@ protected LibLinearTrainer(LibLinearType<T> trainerType, double cost, int maxIte
@Override
public void postConfig() {
libLinearParams = new Parameter(trainerType.getSolverType(),cost,terminationCriterion,maxIterations,epsilon);
+ rng = new SplittableRandom(seed);
Linear.disableDebugOutput();
}
@@ -134,27 +170,35 @@ public LibLinearModel<T> train(Dataset<T> examples, Map<String,Provenance> runP
}
@Override
- public synchronized LibLinearModel<T> train(Dataset<T> examples, Map<String,Provenance> runProvenance, int invocationCount) {
+ public LibLinearModel<T> train(Dataset<T> examples, Map<String,Provenance> runProvenance, int invocationCount) {
if (examples.getOutputInfo().getUnknownCount() > 0) {
throw new IllegalArgumentException("The supplied Dataset contained unknown Outputs, and this Trainer is supervised.");
}
+
+ // Creates a new RNG, adds one to the invocation count.
+ TrainerProvenance trainerProvenance;
+ SplittableRandom localRNG;
+ synchronized(this) {
+ if(invocationCount != INCREMENT_INVOCATION_COUNT) {
+ setInvocationCount(invocationCount);
+ }
+ localRNG = rng.split();
+ trainerProvenance = getProvenance();
+ trainInvocationCount++;
+ }
+
ImmutableFeatureMap featureIDMap = examples.getFeatureIDMap();
ImmutableOutputInfo<T> outputIDInfo = examples.getOutputIDInfo();
- if(invocationCount != INCREMENT_INVOCATION_COUNT) {
- setInvocationCount(invocationCount);
- }
- TrainerProvenance trainerProvenance = getProvenance();
- ModelProvenance provenance = new ModelProvenance(LibLinearModel.class.getName(), OffsetDateTime.now(), examples.getProvenance(), trainerProvenance, runProvenance);
- trainInvocationCount++;
+ // Setup parameters and RNG
Parameter curParams = setupParameters(outputIDInfo);
+ curParams.setRandom(new Random(localRNG.nextLong()));
+
+ ModelProvenance provenance = new ModelProvenance(LibLinearModel.class.getName(), OffsetDateTime.now(), examples.getProvenance(), trainerProvenance, runProvenance);
Pair<FeatureNode[][],double[][]> data = extractData(examples,outputIDInfo,featureIDMap);
- List<de.bwaldvogel.liblinear.Model> models;
- synchronized (LibLinearTrainer.class) {
- models = trainModels(curParams, featureIDMap.size() + 1, data.getA(), data.getB());
- }
+ List<de.bwaldvogel.liblinear.Model> models = trainModels(curParams, featureIDMap.size() + 1, data.getA(), data.getB());
return createModel(provenance,featureIDMap,outputIDInfo,models);
}
@@ -170,7 +214,11 @@ public synchronized void setInvocationCount(int invocationCount) {
throw new IllegalArgumentException("The supplied invocationCount is less than zero.");
}
- this.trainInvocationCount = invocationCount;
+ rng = new SplittableRandom(seed);
+
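+ // Replay one split per recorded invocation so the RNG ends in the same state as if train had been called invocationCount times.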
+ for (trainInvocationCount = 0; trainInvocationCount < invocationCount; trainInvocationCount++){
+ SplittableRandom localRNG = rng.split();
+ }
}
@Override
@@ -188,6 +236,8 @@ public String toString() {
buffer.append(libLinearParams.getMaxIters());
buffer.append(",regression-epsilon=");
buffer.append(libLinearParams.getP());
+ buffer.append(",seed=");
+ buffer.append(seed);
buffer.append(')');
return buffer.toString();
@@ -223,13 +273,13 @@ public String toString() {
protected abstract Pair<FeatureNode[][],double[][]> extractData(Dataset<T> data, ImmutableOutputInfo<T> outputInfo, ImmutableFeatureMap featureMap);
/**
- * Constructs the parameters. Most of the time this is a no-op, but
+ * Constructs the parameters. Most of the time this just clones the existing ones, but
* classification overrides it to incorporate label weights if they exist.
* @param info The output info.
* @return The Parameters to use for training.
*/
protected Parameter setupParameters(ImmutableOutputInfo<T> info) {
- return libLinearParams;
+ return libLinearParams.clone();
}
/**
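// --- Editor's sketch (illustrative, not part of the diff) ---
// The shape of the new RNG flow in train(): snapshot state under the trainer's own
// monitor, then hand liblinear-java 2.44 a per-call Random via Parameter.setRandom.
// This replaces the global Linear.resetRandom() call and the lock on
// LibLinearTrainer.class. `rng` and `libLinearParams` stand in for the trainer fields.
private Parameter nextTrainingParameters() {
    SplittableRandom localRNG;
    synchronized (this) {           // only the state snapshot is serialised
        localRNG = rng.split();     // the split order is what makes training reproducible
    }
    Parameter curParams = libLinearParams.clone();        // never mutate shared parameters
    curParams.setRandom(new Random(localRNG.nextLong())); // per-call RNG, no global state
    return curParams;
}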
diff --git a/Regression/LibLinear/src/main/java/org/tribuo/regression/liblinear/LibLinearRegressionTrainer.java b/Regression/LibLinear/src/main/java/org/tribuo/regression/liblinear/LibLinearRegressionTrainer.java
index 1db2c7b91..77bb1a3f3 100644
--- a/Regression/LibLinear/src/main/java/org/tribuo/regression/liblinear/LibLinearRegressionTrainer.java
+++ b/Regression/LibLinear/src/main/java/org/tribuo/regression/liblinear/LibLinearRegressionTrainer.java
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2015-2021, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2015, 2022, Oracle and/or its affiliates. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -36,6 +36,7 @@
import java.util.ArrayList;
import java.util.List;
+import java.util.Random;
import java.util.logging.Logger;
/**
@@ -43,10 +44,6 @@
*
* This generates an independent liblinear model for each regression dimension.
*
- * Note the train method is synchronized on {@code LibLinearTrainer.class} due to a global RNG in liblinear-java.
- * This is insufficient to ensure reproducibility if liblinear-java is used directly in the same JVM as Tribuo, but
- * avoids locking on classes Tribuo does not control.
- *
* See:
*
* Fan RE, Chang KW, Hsieh CJ, Wang XR, Lin CJ.
@@ -64,6 +61,11 @@ public class LibLinearRegressionTrainer extends LibLinearTrainer<Regressor> {
private static final Logger logger = Logger.getLogger(LibLinearRegressionTrainer.class.getName());
+ /**
+ * Used in the tests for regression dimension re-ordering to revert to the 4.2 behaviour.
+ */
+ boolean forceZero = false;
+
/**
* Creates a trainer using the default values (L2R_L2LOSS_SVR, 1, 1000, 0.1, 0.1).
*/
@@ -83,6 +85,8 @@ public LibLinearRegressionTrainer(LinearRegressionType trainerType) {
/**
* Creates a trainer for a LibLinear regression model.
+ *
+ * Uses {@link Trainer#DEFAULT_SEED} as the RNG seed.
* @param trainerType Loss function and optimisation method combination.
* @param cost Cost penalty for each incorrectly classified training point.
* @param maxIterations The maximum number of dataset iterations.
@@ -90,7 +94,20 @@ public LibLinearRegressionTrainer(LinearRegressionType trainerType) {
* @param epsilon The insensitivity of the regression loss to small differences.
*/
public LibLinearRegressionTrainer(LinearRegressionType trainerType, double cost, int maxIterations, double terminationCriterion, double epsilon) {
- super(trainerType,cost,maxIterations,terminationCriterion,epsilon);
+ this(trainerType,cost,maxIterations,terminationCriterion,epsilon,Trainer.DEFAULT_SEED);
+ }
+
+ /**
+ * Creates a trainer for a LibLinear regression model.
+ * @param trainerType Loss function and optimisation method combination.
+ * @param cost Cost penalty for each incorrectly classified training point.
+ * @param maxIterations The maximum number of dataset iterations.
+ * @param terminationCriterion How close does the optimisation function need to be before terminating that subproblem (usually set to 0.1).
+ * @param epsilon The insensitivity of the regression loss to small differences.
+ * @param seed The RNG seed.
+ */
+ public LibLinearRegressionTrainer(LinearRegressionType trainerType, double cost, int maxIterations, double terminationCriterion, double epsilon, long seed) {
+ super(trainerType,cost,maxIterations,terminationCriterion,epsilon,seed);
}
/**
@@ -117,9 +134,14 @@ protected List<de.bwaldvogel.liblinear.Model> trainModels(Parameter curParams, int numFeatures, FeatureN
data.n = numFeatures;
data.bias = 1.0;
- // Note this isn't sufficient for reproducibility as it doesn't cope with concurrency.
- // Concurrency safety is handled by the global lock on LibLinearTrainer.class in LibLinearTrainer.train.
- Linear.resetRandom();
+ /*
+ * Enforces the behaviour of Tribuo 4.2 and liblinear-java 2.43 to allow
+ * TestLibLinear.testThreeDenseData to validate that regression indices
+ * are handled correctly.
+ */
+ if (forceZero) {
+ curParams.setRandom(new Random(0));
+ }
models.add(Linear.train(data, curParams));
}
diff --git a/Regression/LibLinear/src/test/java/org/tribuo/regression/liblinear/TestLibLinear.java b/Regression/LibLinear/src/test/java/org/tribuo/regression/liblinear/TestLibLinear.java
index bc999a8a3..bb228c93e 100644
--- a/Regression/LibLinear/src/test/java/org/tribuo/regression/liblinear/TestLibLinear.java
+++ b/Regression/LibLinear/src/test/java/org/tribuo/regression/liblinear/TestLibLinear.java
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2015-2021, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2015, 2022, Oracle and/or its affiliates. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -103,7 +103,9 @@ public void testMultiDenseData() {
@Test
public void testThreeDenseData() {
Pair<Dataset<Regressor>,Dataset<Regressor>> p = RegressionDataGenerator.threeDimDenseTrainTest(1.0, false);
- LibLinearModel<Regressor> llModel = t.train(p.getA());
+ LibLinearRegressionTrainer localTrainer = new LibLinearRegressionTrainer(new LinearRegressionType(LinearType.L2R_L2LOSS_SVR_DUAL),1.0,1000,0.1,0.5);
+ localTrainer.forceZero = true;
+ LibLinearModel llModel = localTrainer.train(p.getA());
RegressionEvaluation llEval = e.evaluate(llModel,p.getB());
double expectedDim1 = 0.6634367596601265;
double expectedDim2 = 0.6634367596601265;
@@ -116,7 +118,7 @@ public void testThreeDenseData() {
assertEquals(expectedAve,llEval.averageR2(),1e-6);
p = RegressionDataGenerator.threeDimDenseTrainTest(1.0, true);
- llModel = t.train(p.getA());
+ llModel = localTrainer.train(p.getA());
llEval = e.evaluate(llModel,p.getB());
assertEquals(expectedDim1,llEval.r2(new Regressor(RegressionDataGenerator.firstDimensionName,Double.NaN)),1e-6);
diff --git a/pom.xml b/pom.xml
index b7fd406c5..1a9262150 100644
--- a/pom.xml
+++ b/pom.xml
@@ -1,6 +1,6 @@
- <liblinear.version>2.43</liblinear.version>
+ <liblinear.version>2.44</liblinear.version>
<libsvm.version>3.25</libsvm.version>
<onnxruntime.version>1.9.0</onnxruntime.version>
<tensorflow.version>0.4.1</tensorflow.version>