Skip to content

Commit

Permalink
Update Gradle Wrapper to 8.2.1 for Enhanced JDK20 Compatibility
Browse files Browse the repository at this point in the history
- **Reason for Upgrade**: The migration to JDK20 ([reference](https://github.com/opensearch-project/opensearch-build/blob/aa65a8ecd69f77c3d3104043dd1c48dff708bffa/manifests/3.0.0/opensearch-3.0.0.yml#L9)) rendered the current Gradle version (7.6.1) incompatible.

- **Actions Taken**:
  - **Gradle Wrapper Update**: Upgraded the Gradle wrapper to version 8.2.1 to maintain compatibility with JDK20. The Gradle wrapper files are generated using the `./gradlew wrapper` command.
  - Applied `spotless` due to new formatting requirements in Gradle 8.
  - Resolved test "jar hell" issues. Gradle 8 introduced internal JARs to the test classpath that conflicted with dependencies from `org.junit.vintage:junit-vintage-engine`. As a remedy, these conflicting JARs have been excluded.

- **Relevant Pull Requests**:
  - [Alerting#893](https://github.com/opensearch-project/alerting/pull/893/files)
  - [ML-Commons#892](opensearch-project/ml-commons#892)
  - [Security PR](opensearch-project/security#2978)

- **Verification**: Successfully verified the changes using `gradle build`.

Signed-off-by: Kaituo Li <[email protected]>
  • Loading branch information
kaituo committed Aug 17, 2023
1 parent 5ac6390 commit 89c5c8c
Show file tree
Hide file tree
Showing 61 changed files with 410 additions and 644 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/test_build_multi_platform.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ jobs:
Build-ad-windows:
strategy:
matrix:
java: [ 11, 17 ]
java: [ 11, 17, 20 ]
name: Build and Test Anomaly Detection Plugin on Windows
runs-on: windows-latest
steps:
Expand Down Expand Up @@ -39,7 +39,7 @@ jobs:
Build-ad:
strategy:
matrix:
java: [11,17]
java: [11,17,20]
os: [ubuntu-latest, macos-latest]
fail-fast: false

Expand Down
40 changes: 24 additions & 16 deletions build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -64,8 +64,8 @@ buildscript {
}

plugins {
id 'nebula.ospackage' version "8.3.0" apply false
id "com.diffplug.gradle.spotless" version "3.26.1"
id 'com.netflix.nebula.ospackage' version "11.0.0"
id "com.diffplug.spotless" version "6.18.0"
id 'java-library'
// Gradle 7.6 support was added in test-retry 1.4.0.
id 'org.gradle.test-retry' version '1.4.1'
Expand Down Expand Up @@ -160,11 +160,12 @@ dependencies {
testImplementation group: 'net.bytebuddy', name: 'byte-buddy', version: '1.9.15'
testImplementation group: 'net.bytebuddy', name: 'byte-buddy-agent', version: '1.9.15'
testCompileOnly 'org.apiguardian:apiguardian-api:1.1.0'
testImplementation 'org.junit.jupiter:junit-jupiter-api:5.7.2'
testImplementation 'org.junit.jupiter:junit-jupiter-params:5.7.2'
testImplementation 'org.junit.jupiter:junit-jupiter-engine:5.7.2'
// JUnit Jupiter is required to run unit tests not inherited from OpenSearchTestCase (e.g., PreviousValueImputerTests)
testImplementation 'org.junit.jupiter:junit-jupiter-api:5.8.2'
testImplementation 'org.junit.jupiter:junit-jupiter-params:5.8.2'
testImplementation 'org.junit.jupiter:junit-jupiter-engine:5.8.2'
testImplementation "org.opensearch:opensearch-core:${opensearch_version}"
testRuntimeOnly 'org.junit.vintage:junit-vintage-engine:5.7.2'
testRuntimeOnly 'org.junit.vintage:junit-vintage-engine:5.8.2'
testCompileOnly 'junit:junit:4.13.2'
}

Expand Down Expand Up @@ -302,6 +303,14 @@ test {
excludeTestsMatching "org.opensearch.ad.ml.HCADModelPerfTests"
}
}

/* Gradle 8 includes some of its own internal JARs in the test classpath, and these
overlap with the dependencies that org.junit.vintage:junit-vintage-engine pulls in. To prevent
jar hell, exclude the conflicting JARs. */
classpath = classpath.filter {
!it.toString().contains("junit-platform-engine-1.8.2.jar") &&
!it.toString().contains("junit-platform-commons-1.8.2.jar")
}
}

task integTest(type: RestIntegTestTask) {
Expand Down Expand Up @@ -711,8 +720,8 @@ jacocoTestCoverageVerification {

jacocoTestReport {
reports {
xml.enabled = true
html.enabled = true
xml.required = true // for coverlay
html.required = true // human readable
}
}

Expand All @@ -722,10 +731,11 @@ jacocoTestCoverageVerification.dependsOn jacocoTestReport
compileJava.options.compilerArgs << "-Xlint:-deprecation,-rawtypes,-serial,-try,-unchecked"

test {
// required to run unit tests not inherited from OpenSearchTestCase (e.g., PreviousValueImputerTests)
useJUnitPlatform()
}

apply plugin: 'nebula.ospackage'
apply plugin: 'com.netflix.nebula.ospackage'

// This is afterEvaluate because the bundlePlugin ZIP task is updated afterEvaluate and changes the ZIP name to match the plugin name
afterEvaluate {
Expand All @@ -735,7 +745,7 @@ afterEvaluate {
version = "${project.version}" - "-SNAPSHOT"

into '/usr/share/opensearch/plugins'
from(zipTree(bundlePlugin.archivePath)) {
from(zipTree(bundlePlugin.archiveFile)) {
into opensearchplugin.name
}

Expand Down Expand Up @@ -766,9 +776,8 @@ afterEvaluate {
task renameRpm(type: Copy) {
from("$buildDir/distributions")
into("$buildDir/distributions")
include archiveName
rename archiveName, "${packageName}-${version}.rpm"
doLast { delete file("$buildDir/distributions/$archiveName") }
rename "$archiveFileName", "${packageName}-${archiveVersion}.rpm"
doLast { delete file("$buildDir/distributions/$archiveFileName") }
}
}

Expand All @@ -779,9 +788,8 @@ afterEvaluate {
task renameDeb(type: Copy) {
from("$buildDir/distributions")
into("$buildDir/distributions")
include archiveName
rename archiveName, "${packageName}-${version}.deb"
doLast { delete file("$buildDir/distributions/$archiveName") }
rename "$archiveFileName", "${packageName}-${archiveVersion}.deb"
doLast { delete file("$buildDir/distributions/$archiveFileName") }
}
}

Expand Down
Binary file modified gradle/wrapper/gradle-wrapper.jar
Binary file not shown.
3 changes: 2 additions & 1 deletion gradle/wrapper/gradle-wrapper.properties
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-7.6.1-bin.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-8.2.1-bin.zip
networkTimeout=10000
validateDistributionUrl=true
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
16 changes: 10 additions & 6 deletions gradlew
Original file line number Diff line number Diff line change
Expand Up @@ -85,9 +85,6 @@ done
APP_BASE_NAME=${0##*/}
APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit

# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'

# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD=maximum

Expand Down Expand Up @@ -133,26 +130,29 @@ location of your Java installation."
fi
else
JAVACMD=java
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
if ! command -v java >/dev/null 2>&1
then
die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
fi

# Increase the maximum file descriptors if we can.
if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
case $MAX_FD in #(
max*)
# In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked.
# shellcheck disable=SC3045
# shellcheck disable=SC3045
MAX_FD=$( ulimit -H -n ) ||
warn "Could not query maximum file descriptor limit"
esac
case $MAX_FD in #(
'' | soft) :;; #(
*)
# In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked.
# shellcheck disable=SC3045
# shellcheck disable=SC3045
ulimit -n "$MAX_FD" ||
warn "Could not set maximum file descriptor limit to $MAX_FD"
esac
Expand Down Expand Up @@ -197,6 +197,10 @@ if "$cygwin" || "$msys" ; then
done
fi


# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'

# Collect all arguments for the java command;
# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
# shell script including quotes and variable substitutions, so put them in
Expand Down
45 changes: 8 additions & 37 deletions src/main/java/org/opensearch/ad/AnomalyDetectorJobRunner.java
Original file line number Diff line number Diff line change
Expand Up @@ -307,38 +307,11 @@ private void runAnomalyDetectionJob(
detectionStartTime.toEpochMilli(),
executionStartTime.toEpochMilli()
);
client
.execute(
AnomalyResultAction.INSTANCE,
request,
ActionListener
.wrap(
response -> {
indexAnomalyResult(
jobParameter,
lockService,
lock,
detectionStartTime,
executionStartTime,
response,
recorder,
detector
);
},
exception -> {
handleAdException(
jobParameter,
lockService,
lock,
detectionStartTime,
executionStartTime,
exception,
recorder,
detector
);
}
)
);
client.execute(AnomalyResultAction.INSTANCE, request, ActionListener.wrap(response -> {
indexAnomalyResult(jobParameter, lockService, lock, detectionStartTime, executionStartTime, response, recorder, detector);
}, exception -> {
handleAdException(jobParameter, lockService, lock, detectionStartTime, executionStartTime, exception, recorder, detector);
}));
} catch (Exception e) {
indexAnomalyResultException(
jobParameter,
Expand Down Expand Up @@ -672,11 +645,9 @@ private void releaseLock(Job jobParameter, LockService lockService, LockModel lo
lockService
.release(
lock,
ActionListener
.wrap(
released -> { log.info("Released lock for AD job {}", jobParameter.getName()); },
exception -> { log.error("Failed to release lock for AD job: " + jobParameter.getName(), exception); }
)
ActionListener.wrap(released -> { log.info("Released lock for AD job {}", jobParameter.getName()); }, exception -> {
log.error("Failed to release lock for AD job: " + jobParameter.getName(), exception);
})
);
}
}
8 changes: 3 additions & 5 deletions src/main/java/org/opensearch/ad/AnomalyDetectorRunner.java
Original file line number Diff line number Diff line change
Expand Up @@ -84,11 +84,9 @@ public void executeDetector(
listener.onResponse(Collections.emptyList());
return;
}
ActionListener<EntityAnomalyResult> entityAnomalyResultListener = ActionListener
.wrap(
entityAnomalyResult -> { listener.onResponse(entityAnomalyResult.getAnomalyResults()); },
e -> onFailure(e, listener, detector.getId())
);
ActionListener<EntityAnomalyResult> entityAnomalyResultListener = ActionListener.wrap(entityAnomalyResult -> {
listener.onResponse(entityAnomalyResult.getAnomalyResults());
}, e -> onFailure(e, listener, detector.getId()));
MultiResponsesDelegateActionListener<EntityAnomalyResult> multiEntitiesResponseListener =
new MultiResponsesDelegateActionListener<EntityAnomalyResult>(
entityAnomalyResultListener,
Expand Down
29 changes: 11 additions & 18 deletions src/main/java/org/opensearch/ad/caching/PriorityCache.java
Original file line number Diff line number Diff line change
Expand Up @@ -133,12 +133,9 @@ public PriorityCache(
Duration inactiveEntityTtl = DateUtils.toDuration(checkpointTtl.get(settings));

this.inActiveEntities = createInactiveCache(inactiveEntityTtl, maxInactiveStates);
clusterService
.getClusterSettings()
.addSettingsUpdateConsumer(
checkpointTtl,
it -> { this.inActiveEntities = createInactiveCache(DateUtils.toDuration(it), maxInactiveStates); }
);
clusterService.getClusterSettings().addSettingsUpdateConsumer(checkpointTtl, it -> {
this.inActiveEntities = createInactiveCache(DateUtils.toDuration(it), maxInactiveStates);
});

this.threadPool = threadPool;
this.random = new Random(42);
Expand All @@ -163,19 +160,15 @@ public ModelState<EntityModel> get(String modelId, AnomalyDetector detector) {
// during maintenance period, stop putting new entries
if (!maintenanceLock.isLocked() && modelState == null) {
if (ADEnabledSetting.isDoorKeeperInCacheEnabled()) {
DoorKeeper doorKeeper = doorKeepers
.computeIfAbsent(
detectorId,
id -> {
// reset every 60 intervals
return new DoorKeeper(
TimeSeriesSettings.DOOR_KEEPER_FOR_CACHE_MAX_INSERTION,
TimeSeriesSettings.DOOR_KEEPER_FALSE_POSITIVE_RATE,
detector.getIntervalDuration().multipliedBy(TimeSeriesSettings.DOOR_KEEPER_MAINTENANCE_FREQ),
clock
);
}
DoorKeeper doorKeeper = doorKeepers.computeIfAbsent(detectorId, id -> {
// reset every 60 intervals
return new DoorKeeper(
TimeSeriesSettings.DOOR_KEEPER_FOR_CACHE_MAX_INSERTION,
TimeSeriesSettings.DOOR_KEEPER_FALSE_POSITIVE_RATE,
detector.getIntervalDuration().multipliedBy(TimeSeriesSettings.DOOR_KEEPER_MAINTENANCE_FREQ),
clock
);
});

// first hit, ignore
// since door keeper may get reset during maintenance, it is possible
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -72,19 +72,9 @@ public void clusterChanged(ClusterChangedEvent event) {
if (delta.removed() || delta.added()) {
LOG.info(NODE_CHANGED_MSG + ", node removed: {}, node added: {}", delta.removed(), delta.added());
hashRing.addNodeChangeEvent();
hashRing
.buildCircles(
delta,
ActionListener
.runAfter(
ActionListener
.wrap(
hasRingBuildDone -> { LOG.info("Hash ring build result: {}", hasRingBuildDone); },
e -> { LOG.error("Failed updating AD version hash ring", e); }
),
() -> inProgress.release()
)
);
hashRing.buildCircles(delta, ActionListener.runAfter(ActionListener.wrap(hasRingBuildDone -> {
LOG.info("Hash ring build result: {}", hasRingBuildDone);
}, e -> { LOG.error("Failed updating AD version hash ring", e); }), () -> inProgress.release()));
} else {
inProgress.release();
}
Expand Down
31 changes: 11 additions & 20 deletions src/main/java/org/opensearch/ad/cluster/DailyCron.java
Original file line number Diff line number Diff line change
Expand Up @@ -58,26 +58,17 @@ public void run() {
)
)
.setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN);
clientUtil
.execute(
DeleteByQueryAction.INSTANCE,
deleteRequest,
ActionListener
.wrap(
response -> {
// if 0 docs get deleted, it means our query cannot find any matching doc
LOG.info("{} " + CHECKPOINT_DELETED_MSG, response.getDeleted());
},
exception -> {
if (exception instanceof IndexNotFoundException) {
LOG.info(CHECKPOINT_NOT_EXIST_MSG);
} else {
// Gonna eventually delete in maintenance window.
LOG.error(CANNOT_DELETE_OLD_CHECKPOINT_MSG, exception);
}
}
)
);
clientUtil.execute(DeleteByQueryAction.INSTANCE, deleteRequest, ActionListener.wrap(response -> {
// if 0 docs get deleted, it means our query cannot find any matching doc
LOG.info("{} " + CHECKPOINT_DELETED_MSG, response.getDeleted());
}, exception -> {
if (exception instanceof IndexNotFoundException) {
LOG.info(CHECKPOINT_NOT_EXIST_MSG);
} else {
// Gonna eventually delete in maintenance window.
LOG.error(CANNOT_DELETE_OLD_CHECKPOINT_MSG, exception);
}
}));
}

}
10 changes: 3 additions & 7 deletions src/main/java/org/opensearch/ad/cluster/HashRing.java
Original file line number Diff line number Diff line change
Expand Up @@ -177,13 +177,9 @@ public void buildCirclesForRealtimeAD() {
if (nodeChangeEvents.isEmpty()) {
return;
}
buildCircles(
ActionListener
.wrap(
r -> { LOG.debug("build circles on AD versions successfully"); },
e -> { LOG.error("Failed to build circles on AD versions", e); }
)
);
buildCircles(ActionListener.wrap(r -> { LOG.debug("build circles on AD versions successfully"); }, e -> {
LOG.error("Failed to build circles on AD versions", e);
}));
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -66,12 +66,12 @@ public void run() {
.lte(clock.millis() - defaultCheckpointTtl.toMillis())
.format(ADCommonName.EPOCH_MILLIS_FORMAT)
),
ActionListener
.wrap(
response -> { cleanupBasedOnShardSize(defaultCheckpointTtl.minusDays(1)); },
// The docs will be deleted in next scheduled windows. No need for retrying.
exception -> LOG.error("delete docs by query fails for checkpoint index", exception)
)
ActionListener.wrap(response -> {
cleanupBasedOnShardSize(defaultCheckpointTtl.minusDays(1));
},
// The docs will be deleted in next scheduled windows. No need for retrying.
exception -> LOG.error("delete docs by query fails for checkpoint index", exception)
)
);

}
Expand Down
Loading

0 comments on commit 89c5c8c

Please sign in to comment.