[Backport to 2.10] upgrading commons-lang3 version to fix conflict issue #1014

Merged
2 commits merged on Sep 5, 2023

Changes from all commits
26 changes: 13 additions & 13 deletions build.gradle
@@ -45,7 +45,7 @@ buildscript {

// gradle build won't print logs during tests by default unless there is a failure.
// It is useful to record intermediate information like prediction precision and recall.
// This option turns on log printing during tests.
printLogs = "true" == System.getProperty("test.logs", "false")
}
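
The printLogs flag above only computes a boolean from the test.logs system property; the hunk does not show where it is consumed. As a minimal sketch (assumed wiring, not taken from this PR), such a flag is typically fed into the test task's logging so standard output is only printed when the property is set:

test {
    testLogging {
        // Assumed wiring: surface test stdout/stderr only when the build is invoked
        // with -Dtest.logs=true, e.g. ./gradlew test -Dtest.logs=true
        showStandardStreams = printLogs
        events "passed", "skipped", "failed"
    }
}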

@@ -184,7 +184,7 @@ publishing {
}
}
}

repositories {
maven {
name = "Snapshots"
@@ -414,7 +414,7 @@ String bwcFilePath = "src/test/resources/org/opensearch/ad/bwc/"
String bwcJobSchedulerPath = bwcFilePath + "job-scheduler/"
String bwcAnomalyDetectionPath = bwcFilePath + "anomaly-detection/"

2.times {i ->
testClusters {
"${baseName}$i" {
testDistribution = "ARCHIVE"
@@ -515,15 +515,15 @@ List<Provider<RegularFile>> plugins = [
return new RegularFile() {
@Override
File getAsFile() {
return fileTree(bwcFilePath + "anomaly-detection/" + project.version).getSingleFile()
}
}
}
})
]

// Creates 2 test clusters with 3 nodes of the old version.
2.times {i ->
task "${baseName}#oldVersionClusterTask$i"(type: StandaloneRestIntegTestTask) {
useCluster testClusters."${baseName}$i"
filter {
@@ -534,10 +534,10 @@ List<Provider<RegularFile>> plugins = [
systemProperty 'tests.plugin_bwc_version', bwcVersion
nonInputProperties.systemProperty('tests.rest.cluster', "${-> testClusters."${baseName}$i".allHttpSocketURI.join(",")}")
nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}$i".getName()}")
}
}

// Upgrades one node of the old cluster to new OpenSearch version with upgraded plugin version
// This results in a mixed cluster with 2 nodes on the old version and 1 upgraded node.
// This is also used as a one third upgraded cluster for a rolling upgrade.
task "${baseName}#mixedClusterTask"(type: StandaloneRestIntegTestTask) {
@@ -615,7 +615,7 @@ task "${baseName}#rollingUpgradeClusterTask"(type: StandaloneRestIntegTestTask)
nonInputProperties.systemProperty('tests.clustername', "${-> testClusters."${baseName}0".getName()}")
}

// Upgrades all the nodes of the old cluster to new OpenSearch version with upgraded plugin version
// at the same time resulting in a fully upgraded cluster.
task "${baseName}#fullRestartClusterTask"(type: StandaloneRestIntegTestTask) {
dependsOn "${baseName}#oldVersionClusterTask1"
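
The comments above lay out the backwards-compatibility test progression: two clusters start on the old version, one node is upgraded to form a mixed cluster, the remaining nodes are rolled forward for the rolling-upgrade case, and a second cluster is upgraded all at once for the full-restart case. A sketch of how such per-stage tasks are usually tied together so a single invocation runs the whole progression; the aggregate task below is illustrative and not part of this diff:

// Illustrative aggregator, assumed rather than taken from this hunk: the stage tasks
// already chain via dependsOn (as on the line above), so depending on the last task
// of each chain runs old cluster -> mixed cluster -> rolling upgrade and full restart.
task "${baseName}#bwcTestSuite" {
    dependsOn "${baseName}#rollingUpgradeClusterTask"
    dependsOn "${baseName}#fullRestartClusterTask"
}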
@@ -763,7 +763,7 @@ dependencies {
implementation group: 'io.protostuff', name: 'protostuff-runtime', version: '1.8.0'
implementation group: 'io.protostuff', name: 'protostuff-api', version: '1.8.0'
implementation group: 'io.protostuff', name: 'protostuff-collectionschema', version: '1.8.0'
-implementation group: 'org.apache.commons', name: 'commons-lang3', version: '3.12.0'
+implementation group: 'org.apache.commons', name: 'commons-lang3', version: '3.13.0'

implementation "org.jacoco:org.jacoco.agent:0.8.5"
implementation ("org.jacoco:org.jacoco.ant:0.8.5") {
@@ -898,4 +898,4 @@ task updateVersion {
// Include the required files that need to be updated with the new version
ant.replaceregexp(file:'build.gradle', match: '"opensearch.version", "\\d.*"', replace: '"opensearch.version", "' + newVersion.tokenize('-')[0] + '-SNAPSHOT"', flags:'g', byline:true)
}
}
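
The updateVersion task above rewrites the version string in build.gradle in place with ant.replaceregexp. The hunk only shows the replacement step; assuming the target version arrives as a newVersion system property (the variable is referenced here but its assignment is outside the hunk), a typical setup and invocation would be:

// Assumed setup, not shown in this diff: read the target version from a system
// property so a version bump is a one-line invocation, e.g.
//   ./gradlew updateVersion -DnewVersion=<major.minor.patch>-SNAPSHOT
ext {
    newVersion = System.getProperty('newVersion')
}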

AnomalyDetectionIndices.java

@@ -22,8 +22,6 @@
import static org.opensearch.ad.settings.AnomalyDetectorSettings.ANOMALY_RESULTS_INDEX_MAPPING_FILE;
import static org.opensearch.ad.settings.AnomalyDetectorSettings.CHECKPOINT_INDEX_MAPPING_FILE;
import static org.opensearch.ad.settings.AnomalyDetectorSettings.MAX_PRIMARY_SHARDS;
-import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REPLICATION_TYPE;
-import static org.opensearch.indices.replication.common.ReplicationType.DOCUMENT;

import java.io.IOException;
import java.net.URL;
@@ -531,10 +529,7 @@ public void initAnomalyDetectorIndexIfAbsent(ActionListener<CreateIndexResponse>
* @throws IOException IOException from {@link AnomalyDetectionIndices#getAnomalyDetectorMappings}
*/
public void initAnomalyDetectorIndex(ActionListener<CreateIndexResponse> actionListener) throws IOException {
-// AD indices need RAW (e.g., we want users to be able to consume AD results as soon as possible and send out an alert if
-// anomalies found).
-Settings replicationSettings = Settings.builder().put(SETTING_REPLICATION_TYPE, DOCUMENT.name()).build();
-CreateIndexRequest request = new CreateIndexRequest(AnomalyDetector.ANOMALY_DETECTORS_INDEX, replicationSettings)
+CreateIndexRequest request = new CreateIndexRequest(AnomalyDetector.ANOMALY_DETECTORS_INDEX)
.mapping(getAnomalyDetectorMappings(), XContentType.JSON)
.settings(settings);
adminClient.indices().create(request, markMappingUpToDate(ADIndex.CONFIG, actionListener));
@@ -599,10 +594,7 @@ public void initAnomalyResultIndexDirectly(
ActionListener<CreateIndexResponse> actionListener
) throws IOException {
String mapping = getAnomalyResultMappings();
-// AD indices need RAW (e.g., we want users to be able to consume AD results as soon as possible and send out an alert if
-// anomalies found).
-Settings replicationSettings = Settings.builder().put(SETTING_REPLICATION_TYPE, DOCUMENT.name()).build();
-CreateIndexRequest request = new CreateIndexRequest(resultIndex, replicationSettings).mapping(mapping, XContentType.JSON);
+CreateIndexRequest request = new CreateIndexRequest(resultIndex).mapping(mapping, XContentType.JSON);
if (alias != null) {
request.alias(new Alias(CommonName.ANOMALY_RESULT_INDEX_ALIAS));
}
@@ -621,10 +613,7 @@
*/
public void initAnomalyDetectorJobIndex(ActionListener<CreateIndexResponse> actionListener) {
try {
-// AD indices need RAW (e.g., we want users to be able to consume AD results as soon as possible and send out an alert if
-// anomalies found).
-Settings replicationSettings = Settings.builder().put(SETTING_REPLICATION_TYPE, DOCUMENT.name()).build();
-CreateIndexRequest request = new CreateIndexRequest(AnomalyDetectorJob.ANOMALY_DETECTOR_JOB_INDEX, replicationSettings)
+CreateIndexRequest request = new CreateIndexRequest(AnomalyDetectorJob.ANOMALY_DETECTOR_JOB_INDEX)
.mapping(getAnomalyDetectorJobMappings(), XContentType.JSON);
request
.settings(
@@ -656,10 +645,7 @@ public void initAnomalyDetectorJobIndex(ActionListener<CreateIndexResponse> acti
*/
public void initDetectionStateIndex(ActionListener<CreateIndexResponse> actionListener) {
try {
-// AD indices need RAW (e.g., we want users to be able to consume AD results as soon as possible and send out an alert if
-// anomalies found).
-Settings replicationSettings = Settings.builder().put(SETTING_REPLICATION_TYPE, DOCUMENT.name()).build();
-CreateIndexRequest request = new CreateIndexRequest(CommonName.DETECTION_STATE_INDEX, replicationSettings)
+CreateIndexRequest request = new CreateIndexRequest(CommonName.DETECTION_STATE_INDEX)
.mapping(getDetectionStateMappings(), XContentType.JSON)
.settings(settings);
adminClient.indices().create(request, markMappingUpToDate(ADIndex.STATE, actionListener));
@@ -682,11 +668,7 @@ public void initCheckpointIndex(ActionListener<CreateIndexResponse> actionListen
} catch (IOException e) {
throw new EndRunException("", "Cannot find checkpoint mapping file", true);
}
-// AD indices need RAW (e.g., we want users to be able to consume AD results as soon as possible and send out an alert if
-// anomalies found).
-Settings replicationSettings = Settings.builder().put(SETTING_REPLICATION_TYPE, DOCUMENT.name()).build();
-CreateIndexRequest request = new CreateIndexRequest(CommonName.CHECKPOINT_INDEX_NAME, replicationSettings)
-.mapping(mapping, XContentType.JSON);
+CreateIndexRequest request = new CreateIndexRequest(CommonName.CHECKPOINT_INDEX_NAME).mapping(mapping, XContentType.JSON);
choosePrimaryShards(request);
adminClient.indices().create(request, markMappingUpToDate(ADIndex.CHECKPOINT, actionListener));
}
@@ -743,10 +725,7 @@ void rolloverAndDeleteHistoryIndex() {
}
CreateIndexRequest createRequest = rollOverRequest.getCreateIndexRequest();

-// time series indices need RAW (e.g., we want users to be able to consume AD results as soon as possible
-// and send out an alert if anomalies found).
-Settings replicationSettings = Settings.builder().put(SETTING_REPLICATION_TYPE, DOCUMENT.name()).build();
-createRequest.index(AD_RESULT_HISTORY_INDEX_PATTERN).settings(replicationSettings).mapping(adResultMapping, XContentType.JSON);
+createRequest.index(AD_RESULT_HISTORY_INDEX_PATTERN).mapping(adResultMapping, XContentType.JSON);

choosePrimaryShards(createRequest);
