Skip to content

Commit

Permalink
adding SOKNL wrapper in regressor.py
Browse files Browse the repository at this point in the history
  • Loading branch information
YibinSun committed Mar 5, 2024
1 parent da38b49 commit c679cae
Showing 1 changed file with 81 additions and 0 deletions.
81 changes: 81 additions & 0 deletions src/capymoa/learner/regressor/regressors.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
from moa.classifiers.meta import (
AdaptiveRandomForestRegressor as MOA_AdaptiveRandomForestRegressor,
)
from moa.classifiers.meta import SelfOptimisingKNearestLeaves as MOA_SOKNL


class KNNRegressor(MOARegressor):
Expand Down Expand Up @@ -125,3 +126,83 @@ def __init__(
)
self.moa_learner.prepareForUse()
self.moa_learner.resetLearning()


class SOKNL(MOARegressor):
    """Self-Optimising K-Nearest Leaves (SOKNL) regressor.

    Wraps MOA's ``SelfOptimisingKNearestLeaves`` ensemble regressor: an
    ensemble of SOKNL base trees whose prediction aggregates the k nearest
    leaves, with k optionally tuned automatically (self-optimising).

    :param schema: stream schema describing the instances.
    :param CLI: raw MOA CLI string. When given, every other configuration
        parameter below is ignored and the string is passed through as-is.
    :param random_seed: seed for reproducibility.
    :param tree_learner: base tree learner CLI fragment; defaults to a
        ``SelfOptimisingBaseTree`` with variance-reduction splits.
    :param ensemble_size: number of trees in the ensemble.
    :param max_features: features considered per tree. A float in [0, 1]
        is interpreted as a percentage of features, an int as an absolute
        count, ``"sqrt"`` as sqrt(M)+1, and ``None`` falls back to 60%.
    :param lambda_param: lambda for online bagging (Poisson sampling).
    :param drift_detection_method: drift detector CLI fragment.
    :param warning_detection_method: warning detector CLI fragment.
    :param disable_drift_detection: if True, turn drift detection off.
    :param disable_background_learner: if True, turn background trees off.
    :param self_optimising: if True, SOKNL tunes k automatically.
    :param k_value: number of nearest leaves used for prediction.
    :raises ValueError: if ``max_features`` is not a float in [0, 1], an
        int, ``"sqrt"``, or ``None``.
    """

    def __init__(
        self,
        schema=None,
        CLI=None,
        random_seed=1,
        tree_learner=None,
        ensemble_size=100,
        max_features=0.6,
        lambda_param=6.0,
        drift_detection_method=None,
        warning_detection_method=None,
        disable_drift_detection=False,
        disable_background_learner=False,
        self_optimising=True,
        k_value=5,
    ):
        # Important: must create the MOA object before invoking the super class __init__
        self.moa_learner = MOA_SOKNL()
        super().__init__(
            schema=schema,
            CLI=CLI,
            random_seed=random_seed,
            moa_learner=self.moa_learner,
        )

        # Only build the option string from keyword arguments when no
        # explicit CLI override was supplied.
        if self.CLI is None:
            self.tree_learner = (
                "(SelfOptimisingBaseTree -s VarianceReductionSplitCriterion -g 50 -c 0.01)"
                if tree_learner is None
                else tree_learner
            )
            self.ensemble_size = ensemble_size

            self.max_features = max_features
            # Map max_features onto MOA's feature mode (-o) and per-tree
            # size (-m). bool is a subclass of int, so reject it up front
            # rather than silently emitting "-m True".
            if isinstance(self.max_features, bool):
                raise ValueError("Invalid value for max_features")
            elif isinstance(self.max_features, float) and 0.0 <= self.max_features <= 1.0:
                self.m_features_mode = "(Percentage (M * (m / 100)))"
                # round(), not int(): int() truncates, so e.g.
                # 0.29 * 100 == 28.999... would silently become 28.
                self.m_features_per_tree_size = round(self.max_features * 100)
            elif isinstance(self.max_features, int):
                self.m_features_mode = "(Specified m (integer value))"
                self.m_features_per_tree_size = max_features
            elif self.max_features == "sqrt":
                self.m_features_mode = "(sqrt(M)+1)"
                self.m_features_per_tree_size = -1  # ignored in sqrt mode
            elif self.max_features is None:
                self.m_features_mode = "(Percentage (M * (m / 100)))"
                self.m_features_per_tree_size = 60
            else:
                raise ValueError("Invalid value for max_features")

            self.lambda_param = lambda_param
            self.drift_detection_method = (
                "(ADWINChangeDetector -a 1.0E-3)"
                if drift_detection_method is None
                else drift_detection_method
            )
            self.warning_detection_method = (
                "(ADWINChangeDetector -a 1.0E-2)"
                if warning_detection_method is None
                else warning_detection_method
            )
            self.disable_drift_detection = disable_drift_detection
            self.disable_background_learner = disable_background_learner

            self.self_optimising = self_optimising
            self.k_value = k_value

            # Assemble the MOA option string from the stored attributes
            # (not the raw arguments) so the string always reflects the
            # object's state. Adjacent f-strings avoid the whitespace runs
            # that backslash line-continuations would inject.
            self.moa_learner.getOptions().setViaCLIString(
                f"-l {self.tree_learner} -s {self.ensemble_size} "
                f"{'-f' if self.self_optimising else ''} -k {self.k_value} "
                f"-o {self.m_features_mode} -m {self.m_features_per_tree_size} "
                f"-a {self.lambda_param} -x {self.drift_detection_method} "
                f"-p {self.warning_detection_method} "
                f"{'-u' if self.disable_drift_detection else ''} "
                f"{'-q' if self.disable_background_learner else ''}"
            )
            self.moa_learner.prepareForUse()
            self.moa_learner.resetLearning()

0 comments on commit c679cae

Please sign in to comment.