Fix PowerMock-related tests broken by version bump
OpenSearch 2.2 introduced new dependencies on ClusterSettings, such as TaskResourceTrackingService. The classloader that loads TaskResourceTrackingService may differ from the classloader that loads PowerMock and its related dependencies, and in that case PowerMock reports java.lang.NoClassDefFoundError while initializing ClusterSettings. Since the tests do not actually use the value of ClusterSettings, we pass null instead to avoid initializing it. This change fixes the failing tests.

This PR also fixes spotless formatting errors in FakeNode caused by recent changes.
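
For reference, the workaround in MemoryTracker, SearchFeatureDao, and ModelManager follows one pattern: register dynamic-setting consumers only when a real ClusterService is present, so tests can pass null and ClusterSettings is never touched. The Java sketch below is illustrative only; ExampleTracker and MAX_SIZE_PERCENT are made-up stand-ins for the real classes and the settings in AnomalyDetectorSettings.

import org.opensearch.cluster.service.ClusterService;
import org.opensearch.common.settings.Setting;
import org.opensearch.common.settings.Settings;

// Illustrative stand-in for the constructors touched in this commit, not actual plugin code.
public class ExampleTracker {
    // Hypothetical dynamic setting; the real code registers settings defined in AnomalyDetectorSettings.
    static final Setting<Double> MAX_SIZE_PERCENT = Setting
        .doubleSetting("example.max_size_percent", 0.1, 0.0, Setting.Property.NodeScope, Setting.Property.Dynamic);

    private volatile double maxSizePercent;

    public ExampleTracker(Settings settings, ClusterService clusterService) {
        this.maxSizePercent = MAX_SIZE_PERCENT.get(settings);
        // Register the update consumer only when a real ClusterService is supplied. PowerMock tests
        // pass null here, so ClusterSettings is never initialized and the NoClassDefFoundError
        // described above cannot be triggered.
        if (clusterService != null) {
            clusterService.getClusterSettings().addSettingsUpdateConsumer(MAX_SIZE_PERCENT, it -> this.maxSizePercent = it);
        }
    }
}

A PowerMock test can then construct the object as new ExampleTracker(Settings.EMPTY, null) without ever loading ClusterSettings.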

Testing done:
* gradle build

Signed-off-by: Kaituo Li <kaituo@amazon.com>
kaituo committed Aug 9, 2022
1 parent 103f034 commit 1aa3c79
Showing 7 changed files with 39 additions and 53 deletions.
2 changes: 1 addition & 1 deletion build.gradle
@@ -685,7 +685,7 @@ dependencies {
testImplementation group: 'org.powermock', name: 'powermock-module-junit4-common', version: '2.0.2'
testImplementation group: 'org.powermock', name: 'powermock-core', version: '2.0.2'
testImplementation group: 'org.powermock', name: 'powermock-api-support', version: '2.0.2'
- testImplementation group: 'org.powermock', name: 'powermock-reflect', version: '2.0.7'
+ testImplementation group: 'org.powermock', name: 'powermock-reflect', version: '2.0.2'
testImplementation group: 'org.objenesis', name: 'objenesis', version: '3.0.1'
testImplementation group: 'net.bytebuddy', name: 'byte-buddy', version: '1.9.15'
testImplementation group: 'net.bytebuddy', name: 'byte-buddy-agent', version: '1.9.15'
9 changes: 6 additions & 3 deletions src/main/java/org/opensearch/ad/MemoryTracker.java
@@ -76,9 +76,12 @@ public MemoryTracker(
this.heapSize = jvmService.info().getMem().getHeapMax().getBytes();
this.heapLimitBytes = (long) (heapSize * modelMaxSizePercentage);
this.desiredModelSize = (long) (heapSize * modelDesiredSizePercentage);
- clusterService
- .getClusterSettings()
- .addSettingsUpdateConsumer(MODEL_MAX_SIZE_PERCENTAGE, it -> this.heapLimitBytes = (long) (heapSize * it));
+ if (clusterService != null) {
+ clusterService
+ .getClusterSettings()
+ .addSettingsUpdateConsumer(MODEL_MAX_SIZE_PERCENTAGE, it -> this.heapLimitBytes = (long) (heapSize * it));
+ }

this.thresholdModelBytes = 180_000;
this.adCircuitBreakerService = adCircuitBreakerService;
}
8 changes: 6 additions & 2 deletions src/main/java/org/opensearch/ad/feature/SearchFeatureDao.java
@@ -114,9 +114,13 @@ public SearchFeatureDao(
this.interpolator = interpolator;
this.clientUtil = clientUtil;
this.maxEntitiesForPreview = maxEntitiesForPreview;
- clusterService.getClusterSettings().addSettingsUpdateConsumer(MAX_ENTITIES_FOR_PREVIEW, it -> this.maxEntitiesForPreview = it);

this.pageSize = pageSize;
- clusterService.getClusterSettings().addSettingsUpdateConsumer(PAGE_SIZE, it -> this.pageSize = it);

+ if (clusterService != null) {
+ clusterService.getClusterSettings().addSettingsUpdateConsumer(MAX_ENTITIES_FOR_PREVIEW, it -> this.maxEntitiesForPreview = it);
+ clusterService.getClusterSettings().addSettingsUpdateConsumer(PAGE_SIZE, it -> this.pageSize = it);
+ }
this.minimumDocCountForPreview = minimumDocCount;
this.previewTimeoutInMilliseconds = previewTimeoutInMilliseconds;
this.clock = clock;
8 changes: 5 additions & 3 deletions src/main/java/org/opensearch/ad/ml/ModelManager.java
@@ -148,9 +148,11 @@ public ModelManager(
this.minPreviewSize = minPreviewSize;
this.modelTtl = modelTtl;
this.checkpointInterval = DateUtils.toDuration(checkpointIntervalSetting.get(settings));
- clusterService
- .getClusterSettings()
- .addSettingsUpdateConsumer(checkpointIntervalSetting, it -> this.checkpointInterval = DateUtils.toDuration(it));
+ if (clusterService != null) {
+ clusterService
+ .getClusterSettings()
+ .addSettingsUpdateConsumer(checkpointIntervalSetting, it -> this.checkpointInterval = DateUtils.toDuration(it));
+ }

this.forests = new TRCFMemoryAwareConcurrentHashmap<>(memoryTracker);
this.thresholds = new ConcurrentHashMap<>();
@@ -38,7 +38,6 @@
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
- import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@@ -82,7 +81,6 @@
import org.opensearch.ad.util.ParseUtils;
import org.opensearch.client.Client;
import org.opensearch.cluster.service.ClusterService;
- import org.opensearch.common.settings.ClusterSettings;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.time.DateFormatter;
import org.opensearch.common.xcontent.LoggingDeprecationHandler;
@@ -197,25 +195,9 @@ public void setup() throws Exception {
}).when(executorService).execute(any(Runnable.class));

settings = Settings.EMPTY;
- ClusterSettings clusterSettings = new ClusterSettings(
- Settings.EMPTY,
- Collections
- .unmodifiableSet(
- new HashSet<>(Arrays.asList(AnomalyDetectorSettings.MAX_ENTITIES_FOR_PREVIEW, AnomalyDetectorSettings.PAGE_SIZE))
- )
- );
- when(clusterService.getClusterSettings()).thenReturn(clusterSettings);

searchFeatureDao = spy(
- new SearchFeatureDao(
- client,
- xContent,
- interpolator,
- clientUtil,
- settings,
- clusterService,
- AnomalyDetectorSettings.NUM_SAMPLES_PER_TREE
- )
+ new SearchFeatureDao(client, xContent, interpolator, clientUtil, settings, null, AnomalyDetectorSettings.NUM_SAMPLES_PER_TREE)
);

detectionInterval = new IntervalTimeConfiguration(1, ChronoUnit.MINUTES);
25 changes: 3 additions & 22 deletions src/test/java/org/opensearch/ad/ml/ModelManagerTests.java
@@ -34,12 +34,10 @@
import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.Collections;
- import java.util.HashSet;
import java.util.List;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.Random;
- import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@@ -77,7 +75,6 @@
import org.opensearch.cluster.service.ClusterService;
import org.opensearch.common.collect.ImmutableOpenMap;
import org.opensearch.common.settings.ClusterSettings;
- import org.opensearch.common.settings.Setting;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.monitor.jvm.JvmService;
@@ -92,7 +89,6 @@
import com.amazon.randomcutforest.parkservices.AnomalyDescriptor;
import com.amazon.randomcutforest.parkservices.ThresholdedRandomCutForest;
import com.amazon.randomcutforest.returntypes.DiVector;
- import com.google.common.collect.Sets;

@RunWith(PowerMockRunner.class)
@PowerMockRunnerDelegate(JUnitParamsRunner.class)
@@ -229,13 +225,6 @@ public void setup() {
memoryTracker = mock(MemoryTracker.class);
when(memoryTracker.isHostingAllowed(anyString(), any())).thenReturn(true);

- clusterSettings = new ClusterSettings(
- Settings.EMPTY,
- Collections.unmodifiableSet(new HashSet<>(Arrays.asList(AnomalyDetectorSettings.CHECKPOINT_SAVING_FREQ)))
- );
- clusterService = mock(ClusterService.class);
- when(clusterService.getClusterSettings()).thenReturn(clusterSettings);

settings = Settings
.builder()
.put("plugins.anomaly_detection.model_max_size_percent", modelMaxSizePercentage)
@@ -258,7 +247,7 @@ public void setup() {
featureManager,
memoryTracker,
settings,
- clusterService
+ null
)
);

@@ -424,20 +413,12 @@ public void getRcfResult_throwToListener_whenHeapLimitExceed() {
}).when(checkpointDao).getTRCFModel(eq(rcfModelId), any(ActionListener.class));

when(jvmService.info().getMem().getHeapMax().getBytes()).thenReturn(1_000L);
- final Set<Setting<?>> settingsSet = Stream
- .concat(
- ClusterSettings.BUILT_IN_CLUSTER_SETTINGS.stream(),
- Sets.newHashSet(AnomalyDetectorSettings.MODEL_MAX_SIZE_PERCENTAGE, AnomalyDetectorSettings.CHECKPOINT_SAVING_FREQ).stream()
- )
- .collect(Collectors.toSet());
- ClusterSettings clusterSettings = new ClusterSettings(settings, settingsSet);
- clusterService = new ClusterService(settings, clusterSettings, null);

MemoryTracker memoryTracker = new MemoryTracker(
jvmService,
modelMaxSizePercentage,
modelDesiredSizePercentage,
- clusterService,
+ null,
adCircuitBreakerService
);

@@ -460,7 +441,7 @@ public void getRcfResult_throwToListener_whenHeapLimitExceed() {
featureManager,
memoryTracker,
settings,
- clusterService
+ null
)
);

20 changes: 17 additions & 3 deletions src/test/java/test/org/opensearch/ad/util/FakeNode.java
@@ -94,7 +94,12 @@ public TransportAddress[] addressesFromString(String address) {
Collections.emptySet()
) {
@Override
- protected TaskManager createTaskManager(Settings settings, ClusterSettings clusterSettings, ThreadPool threadPool, Set<String> taskHeaders) {
+ protected TaskManager createTaskManager(
+ Settings settings,
+ ClusterSettings clusterSettings,
+ ThreadPool threadPool,
+ Set<String> taskHeaders
+ ) {
if (MockTaskManager.USE_MOCK_TASK_MANAGER_SETTING.get(settings)) {
return new MockTaskManager(settings, threadPool, taskHeaders);
} else {
@@ -110,8 +115,17 @@ protected TaskManager createTaskManager(Settings settings, ClusterSettings clust
clusterService = createClusterService(threadPool, discoveryNode.get(), clusterSettings);
clusterService.addStateApplier(transportService.getTaskManager());
ActionFilters actionFilters = new ActionFilters(emptySet());
- TaskResourceTrackingService taskResourceTrackingService = new TaskResourceTrackingService(Settings.EMPTY, clusterService.getClusterSettings(), threadPool);
- transportListTasksAction = new TransportListTasksAction(clusterService, transportService, actionFilters, taskResourceTrackingService);
+ TaskResourceTrackingService taskResourceTrackingService = new TaskResourceTrackingService(
+ Settings.EMPTY,
+ clusterService.getClusterSettings(),
+ threadPool
+ );
+ transportListTasksAction = new TransportListTasksAction(
+ clusterService,
+ transportService,
+ actionFilters,
+ taskResourceTrackingService
+ );
transportCancelTasksAction = new TransportCancelTasksAction(clusterService, transportService, actionFilters);
transportService.acceptIncomingRequests();
}
