[Backport 2.x] upgrade gradle to 8.5 #942

Merged
2 commits merged on Jan 25, 2024
@@ -15,6 +15,7 @@ jobs:
java:
- 11
- 17
- 21
runs-on: ubuntu-latest
container:
# using the same image which is used by opensearch-build team to build the OpenSearch Distribution
@@ -68,6 +69,7 @@ jobs:
java:
- 11
- 17
- 21
runs-on: windows-latest

steps:
7 changes: 3 additions & 4 deletions build-tools/esplugin-coverage.gradle
@@ -54,8 +54,8 @@ jacocoTestReport {
sourceDirectories.from = "src/main/kotlin"
classDirectories.from = sourceSets.main.output
reports {
html.enabled = true // human readable
xml.enabled = true // for coverlay
html.required = true // human readable
xml.required = true // for coverlay
}
}

@@ -65,8 +65,7 @@ allprojects{

testClusters.integTest {
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
// Replacing build with absolute path to fix the error "error opening zip file or JAR manifest missing : /build/tmp/expandedArchives/..../jacocoagent.jar"
jvmArgs " ${dummyIntegTest.jacoco.getAsJvmArg()}".replace('build',"${buildDir}")
jvmArgs " ${dummyIntegTest.jacoco.getAsJvmArg()}"
} else {
jvmArgs " ${dummyIntegTest.jacoco.getAsJvmArg()}".replace('javaagent:','javaagent:/')
}
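For context, JacocoTaskExtension.getAsJvmArg() renders the -javaagent argument for the expanded JaCoCo agent jar. A minimal sketch of the value interpolated above, with an illustrative path that is an assumption rather than something taken from this PR:

    // Roughly what getAsJvmArg() produces (path shown is illustrative only):
    //   -javaagent:build/tmp/expandedArchives/org.jacoco.agent-0.8.11.jar_<hash>/jacocoagent.jar=destfile=build/jacoco/dummyIntegTest.exec
    // The removed Windows branch rewrote the leading "build" segment to the absolute ${buildDir};
    // after the upgrade the value is passed to jvmArgs unchanged.
    jvmArgs " ${dummyIntegTest.jacoco.getAsJvmArg()}"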
18 changes: 1 addition & 17 deletions build-tools/pkgbuild.gradle
@@ -3,7 +3,7 @@
* SPDX-License-Identifier: Apache-2.0
*/

apply plugin: 'nebula.ospackage'
apply plugin: 'com.netflix.nebula.ospackage'

// This is afterEvaluate because the bundlePlugin ZIP task is updated afterEvaluate and changes the ZIP name to match the plugin name
afterEvaluate {
@@ -40,26 +40,10 @@ afterEvaluate {
buildRpm {
arch = 'NOARCH'
dependsOn 'assemble'
finalizedBy 'renameRpm'
task renameRpm(type: Copy) {
from("$buildDir/distributions")
into("$buildDir/distributions")
include archiveName
rename archiveName, "${packageName}-${version}.rpm"
doLast { delete file("$buildDir/distributions/$archiveName") }
}
}

buildDeb {
arch = 'all'
dependsOn 'assemble'
finalizedBy 'renameDeb'
task renameDeb(type: Copy) {
from("$buildDir/distributions")
into("$buildDir/distributions")
include archiveName
rename archiveName, "${packageName}-${version}.deb"
doLast { delete file("$buildDir/distributions/$archiveName") }
}
}
}
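The deleted renameRpm and renameDeb helpers depended on the long-deprecated archiveName property, which Gradle 8 removes, so this backport simply drops the renaming step. If a custom artifact name were still wanted, one hypothetical alternative (not part of this change, and assuming the nebula buildRpm/buildDeb tasks expose the current AbstractArchiveTask naming API) would be to set the output name directly:

    buildRpm {
        arch = 'NOARCH'
        dependsOn 'assemble'
        // hypothetical: name the package up front instead of copying and renaming it afterwards
        archiveFileName = "${packageName}-${version}.rpm"
    }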
196 changes: 191 additions & 5 deletions build.gradle
@@ -41,13 +41,13 @@ buildscript {
classpath "${opensearch_group}.gradle:build-tools:${opensearch_version}"
classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:${kotlin_version}"
classpath "org.jetbrains.kotlin:kotlin-allopen:${kotlin_version}"
classpath "io.gitlab.arturbosch.detekt:detekt-gradle-plugin:1.22.0"
classpath "org.jacoco:org.jacoco.agent:0.8.8"
classpath "io.gitlab.arturbosch.detekt:detekt-gradle-plugin:1.23.4"
classpath "org.jacoco:org.jacoco.agent:0.8.11"
}
}

plugins {
id 'nebula.ospackage' version "8.3.0"
id "com.netflix.nebula.ospackage-base" version "11.6.0"
id "com.dorongold.task-tree" version "1.5"
id 'java-library'
}
@@ -132,6 +132,8 @@ configurations.all {
if (it.state != Configuration.State.UNRESOLVED) return
resolutionStrategy {
force "org.jetbrains.kotlin:kotlin-stdlib:${kotlin_version}"
force "org.jetbrains.kotlin:kotlin-stdlib-jdk8:${kotlin_version}"
force "org.jetbrains.kotlin:kotlin-reflect:${kotlin_version}"
force "org.jetbrains.kotlin:kotlin-stdlib-common:${kotlin_version}"
force "com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:2.11.4"
force "org.yaml:snakeyaml:2.0"
@@ -324,6 +326,189 @@ testClusters.integTest {
setting 'path.repo', repo.absolutePath
}

String baseVersion = "1.3.2"
String bwcVersion = baseVersion + ".0"
String currentVersion = opensearch_version.replace("-SNAPSHOT","")
String baseName = "reportsSchedulerBwcCluster"
String bwcFilePath = "src/test/resources/bwc"
String bwcReportsPlugin = "opensearch-reports-scheduler-" + bwcVersion + ".zip"
String bwcJobSchedulerPlugin = "opensearch-job-scheduler-" + bwcVersion + ".zip"
String bwcReportsSchedulerURL = "https://ci.opensearch.org/ci/dbc/distribution-build-opensearch/1.3.2/latest/linux/x64/tar/builds/opensearch/plugins/" + bwcReportsPlugin
String bwcJobSchedulerURL = "https://ci.opensearch.org/ci/dbc/distribution-build-opensearch/1.3.2/latest/linux/x64/tar/builds/opensearch/plugins/" + bwcJobSchedulerPlugin

2.times {i ->
testClusters {
"${baseName}$i" {
testDistribution = "ARCHIVE"
versions = [baseVersion, opensearch_version]
numberOfNodes = 3
plugin(provider(new Callable<RegularFile>(){
@Override
RegularFile call() throws Exception {
return new RegularFile() {
@Override
File getAsFile() {
File dir = new File(bwcFilePath + "/job-scheduler/" + bwcVersion)
if (!dir.exists()) {
dir.mkdirs()
}
File file = new File(dir, "opensearch-job-scheduler-" + bwcVersion + ".zip")
if (!file.exists()) {
new URL(bwcJobSchedulerURL).withInputStream{ ins -> file.withOutputStream{ it << ins }}
}
return fileTree(bwcFilePath + "/job-scheduler/" + bwcVersion).getSingleFile()
}
}
}
}))
plugin(provider(new Callable<RegularFile>(){
@Override
RegularFile call() throws Exception {
return new RegularFile() {
@Override
File getAsFile() {
File dir = new File(bwcFilePath + "/reports-scheduler/" + bwcVersion)
if (!dir.exists()) {
dir.mkdirs()
}
File file = new File(dir, "opensearch-reports-scheduler-" + bwcVersion + ".zip")
if (!file.exists()) {
new URL(bwcReportsSchedulerURL).withInputStream{ ins -> file.withOutputStream{ it << ins }}
}
return fileTree(bwcFilePath + "/reports-scheduler/" + bwcVersion).getSingleFile()
}
}
}
}))
setting 'path.repo', "${buildDir}/cluster/shared/repo/${baseName}"
setting 'http.content_type.required', 'true'
}
}
}

List<Provider<RegularFile>> plugins = []

// Ensure the artifact for the current project version is available to be used for the bwc tests
task prepareBwcTests {
dependsOn bundle
doLast {
plugins = [
provider(new Callable<RegularFile>(){
@Override
RegularFile call() throws Exception {
return new RegularFile() {
@Override
File getAsFile() {
return configurations.zipArchive.asFileTree.getSingleFile()
}
}
}
}),
project.getObjects().fileProperty().value(bundle.getArchiveFile())
]
}
}

// Create two test clusters with 3 nodes of the old version
2.times {i ->
task "${baseName}#oldVersionClusterTask$i"(type: StandaloneRestIntegTestTask) {
dependsOn 'prepareBwcTests'
useCluster testClusters."${baseName}$i"
filter {
includeTestsMatching "org.opensearch.integTest.bwc.*IT"
}
systemProperty 'tests.rest.bwcsuite', 'old_cluster'
systemProperty 'tests.rest.bwcsuite_round', 'old'
systemProperty 'tests.plugin_bwc_version', bwcVersion
nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}$i".allHttpSocketURI.join(",")}")
nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}$i".getName()}")
}
}

// Upgrade one node of the old cluster to new OpenSearch version with upgraded plugin version.
// This results in a mixed cluster with 2 nodes on the old version and 1 upgraded node.
// This is also used as a one third upgraded cluster for a rolling upgrade.
task "${baseName}#mixedClusterTask"(type: StandaloneRestIntegTestTask) {
useCluster testClusters."${baseName}0"
dependsOn "${baseName}#oldVersionClusterTask0"
doFirst {
testClusters."${baseName}0".upgradeNodeAndPluginToNextVersion(plugins)
}
filter {
includeTestsMatching "org.opensearch.integTest.bwc.*IT"
}
systemProperty 'tests.rest.bwcsuite', 'mixed_cluster'
systemProperty 'tests.rest.bwcsuite_round', 'first'
systemProperty 'tests.plugin_bwc_version', bwcVersion
nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}0".allHttpSocketURI.join(",")}")
nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}0".getName()}")
}

// Upgrade the second node to new OpenSearch version with upgraded plugin version after the first node is upgraded.
// This results in a mixed cluster with 1 node on the old version and 2 upgraded nodes.
// This is used for rolling upgrade.
task "${baseName}#twoThirdsUpgradedClusterTask"(type: StandaloneRestIntegTestTask) {
dependsOn "${baseName}#mixedClusterTask"
useCluster testClusters."${baseName}0"
doFirst {
testClusters."${baseName}0".upgradeNodeAndPluginToNextVersion(plugins)
}
filter {
includeTestsMatching "org.opensearch.integTest.bwc.*IT"
}
systemProperty 'tests.rest.bwcsuite', 'mixed_cluster'
systemProperty 'tests.rest.bwcsuite_round', 'second'
systemProperty 'tests.plugin_bwc_version', bwcVersion
nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}0".allHttpSocketURI.join(",")}")
nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}0".getName()}")
}

// Upgrade the third node to new OpenSearch version with upgraded plugin version after the second node is upgraded.
// This results in a fully upgraded cluster.
// This is used for rolling upgrade.
task "${baseName}#rollingUpgradeClusterTask"(type: StandaloneRestIntegTestTask) {
dependsOn "${baseName}#twoThirdsUpgradedClusterTask"
useCluster testClusters."${baseName}0"
doFirst {
testClusters."${baseName}0".upgradeNodeAndPluginToNextVersion(plugins)
}
filter {
includeTestsMatching "org.opensearch.integTest.bwc.*IT"
}
mustRunAfter "${baseName}#mixedClusterTask"
systemProperty 'tests.rest.bwcsuite', 'mixed_cluster'
systemProperty 'tests.rest.bwcsuite_round', 'third'
systemProperty 'tests.plugin_bwc_version', bwcVersion
nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}0".allHttpSocketURI.join(",")}")
nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}0".getName()}")
}

// Upgrade all the nodes of the old cluster to new OpenSearch version with upgraded plugin version
// at the same time resulting in a fully upgraded cluster.
task "${baseName}#fullRestartClusterTask"(type: StandaloneRestIntegTestTask) {
dependsOn "${baseName}#oldVersionClusterTask1"
useCluster testClusters."${baseName}1"
doFirst {
testClusters."${baseName}1".upgradeAllNodesAndPluginsToNextVersion(plugins)
}
filter {
includeTestsMatching "org.opensearch.integTest.bwc.*IT"
}
systemProperty 'tests.rest.bwcsuite', 'upgraded_cluster'
systemProperty 'tests.plugin_bwc_version', bwcVersion
nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}1".allHttpSocketURI.join(",")}")
nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}1".getName()}")
}

// A bwc test suite which runs all the bwc tasks combined
task bwcTestSuite(type: StandaloneRestIntegTestTask) {
exclude '**/*Test*'
exclude '**/*IT*'
dependsOn tasks.named("${baseName}#mixedClusterTask")
dependsOn tasks.named("${baseName}#rollingUpgradeClusterTask")
dependsOn tasks.named("${baseName}#fullRestartClusterTask")
}
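With these tasks defined, the backwards-compatibility suite is invoked like any other Gradle task; a minimal usage sketch (task names come from this build.gradle, any extra system properties your environment needs are omitted):

    # Run the rolling-upgrade and full-restart stages together
    ./gradlew bwcTestSuite

    # Or run a single stage, quoting the task name so the shell does not treat '#' as a comment
    ./gradlew "reportsSchedulerBwcCluster#mixedClusterTask"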

task integTestRemote(type: RestIntegTestTask) {
testClassesDirs = sourceSets.test.output.classesDirs
classpath = sourceSets.test.runtimeClasspath
@@ -355,7 +540,7 @@ run {

task ktlint(type: JavaExec, group: "verification") {
description = "Check Kotlin code style."
main = "com.pinterest.ktlint.Main"
mainClass = "com.pinterest.ktlint.Main"
classpath = configurations.ktlint
args "src/**/*.kt"
// to generate report in checkstyle format prepend following args:
@@ -367,9 +552,10 @@ check.dependsOn ktlint

task ktlintFormat(type: JavaExec, group: "formatting") {
description = "Fix Kotlin code style deviations."
main = "com.pinterest.ktlint.Main"
mainClass = "com.pinterest.ktlint.Main"
classpath = configurations.ktlint
args "-F", "src/**/*.kt"
jvmArgs "--add-opens=java.base/java.lang=ALL-UNNAMED"
}

compileKotlin { kotlinOptions.freeCompilerArgs = ['-Xjsr305=strict'] }
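Both ktlint tasks are plain JavaExec invocations; the --add-opens flag added to ktlintFormat is presumably there because ktlint reflects into JDK internals on newer Java versions (an assumption, the PR does not state the reason). Typical usage:

    # Check Kotlin style (also runs as part of `check` via check.dependsOn ktlint)
    ./gradlew ktlint

    # Rewrite files in place to fix style violations
    ./gradlew ktlintFormat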
Binary file modified gradle/wrapper/gradle-wrapper.jar
3 changes: 2 additions & 1 deletion gradle/wrapper/gradle-wrapper.properties
@@ -1,6 +1,7 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-7.6.1-bin.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-8.5-all.zip
networkTimeout=10000
validateDistributionUrl=true
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
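For reference, these wrapper files are normally regenerated with Gradle's built-in wrapper task rather than edited by hand; a typical command for this upgrade (not shown in the PR, and assuming the -all distribution is wanted to match the URL above) is:

    # Regenerates gradle-wrapper.properties, gradle-wrapper.jar, gradlew and gradlew.bat
    ./gradlew wrapper --gradle-version 8.5 --distribution-type all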
29 changes: 17 additions & 12 deletions gradlew
@@ -83,10 +83,8 @@ done
# This is normally unused
# shellcheck disable=SC2034
APP_BASE_NAME=${0##*/}
APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit

# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036)
APP_HOME=$( cd "${APP_HOME:-./}" > /dev/null && pwd -P ) || exit

# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD=maximum
@@ -133,26 +131,29 @@ location of your Java installation.
fi
else
JAVACMD=java
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
if ! command -v java >/dev/null 2>&1
then
die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
fi

# Increase the maximum file descriptors if we can.
if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
case $MAX_FD in #(
max*)
# In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked.
# shellcheck disable=SC3045
# shellcheck disable=SC2039,SC3045
MAX_FD=$( ulimit -H -n ) ||
warn "Could not query maximum file descriptor limit"
esac
case $MAX_FD in #(
'' | soft) :;; #(
*)
# In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked.
# shellcheck disable=SC3045
# shellcheck disable=SC2039,SC3045
ulimit -n "$MAX_FD" ||
warn "Could not set maximum file descriptor limit to $MAX_FD"
esac
@@ -197,11 +198,15 @@ if "$cygwin" || "$msys" ; then
done
fi

# Collect all arguments for the java command;
# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
# shell script including quotes and variable substitutions, so put them in
# double quotes to make sure that they get re-expanded; and
# * put everything else in single quotes, so that it's not re-expanded.

# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'

# Collect all arguments for the java command:
# * DEFAULT_JVM_OPTS, JAVA_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments,
# and any embedded shellness will be escaped.
# * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be
# treated as '${Hostname}' itself on the command line.

set -- \
"-Dorg.gradle.appname=$APP_BASE_NAME" \
@@ -50,8 +50,8 @@ class ReportsSchedulerBackwardsCompatibilityIT : PluginRestTestCase() {
val pluginNames = plugins.map { plugin -> plugin["name"] }.toSet()
when (CLUSTER_TYPE) {
ClusterType.OLD -> {
assertTrue(pluginNames.contains("opendistro-reports-scheduler"))
assertTrue(pluginNames.contains("opendistro-job-scheduler"))
assertTrue(pluginNames.contains("opensearch-reports-scheduler"))
assertTrue(pluginNames.contains("opensearch-job-scheduler"))
createBasicReportDefinition()
}
ClusterType.MIXED -> {