Correct handling of null max aggregation values in SearchResponse (#1292)

Previously, when the max aggregation in SearchResponse returned a null value, it was converted to a negative number (-9223372036854775808), leading to erroneous timestamps.

This fix adds a check so that only valid, positive aggregation values are considered, preventing null or invalid values from being misinterpreted as negative numbers. The change applies only to timestamps, so filtering out non-positive values is always safe.
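For context, here is a minimal plain-Java sketch (no OpenSearch dependencies) of where the magic number comes from: an empty max aggregation reports Double.NEGATIVE_INFINITY (as the unit tests below simulate), and Java's narrowing cast turns that into Long.MIN_VALUE, which is exactly -9223372036854775808.

```
// Minimal sketch: why an "empty" max aggregation turns into -9223372036854775808.
public class MaxAggSentinelDemo {
    public static void main(String[] args) {
        double emptyMax = Double.NEGATIVE_INFINITY; // value a Max aggregation reports when no documents match
        long asEpochMillis = (long) emptyMax;       // Java narrows negative infinity to Long.MIN_VALUE

        System.out.println(asEpochMillis);                   // -9223372036854775808
        System.out.println(asEpochMillis == Long.MIN_VALUE); // true
        System.out.println(emptyMax > 0);                    // false -> rejected by the new positive-value filter
    }
}
```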

Testing done:
1. Added unit tests to cover scenarios where max aggregation returns null or negative values, ensuring the method returns Optional.empty() when the value is invalid.
2. Added unit tests to verify correct handling of valid positive max aggregation values.
3. Ran e2e manual testing.
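For reference, the new unit tests can be run in isolation with the plugin's Gradle wrapper (assuming the standard Gradle test task; the exact invocation may differ):

```
./gradlew test --tests "org.opensearch.timeseries.util.ParseUtilsTests"
```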

Previously:

```
POST /_plugins/_anomaly_detection/detectors/_validate/model
{
    "name": "test-detector-13",
    "description": "Test detector",
    "time_field": "customer_birth_date",
    "indices": ["opensearch_dashboards_sample_data_ecommerce"],
    "feature_attributes": [
        {
            "feature_name": "cpu",
            "feature_enabled": true,
            "aggregation_query": {
                "total_revenue_usd-field": {
                    "max": {
                        "field": "day_of_week_i"
                    }
                }
            }
        }
    ],
    "detection_interval": {
        "period": {
            "interval":1,
            "unit": "Minutes"
        }
    },
    "window_delay": {
        "period": {
            "interval": 1,
            "unit": "Minutes"
        }
    },
    "category_field": ["currency"]
}
```

returns:

```
nested: OpenSearchParseException[failed to parse date field [-9223372036854775808] with format [strict_date_optional_time||epoch_millis]: [failed to parse date field [-9223372036854775808] with format [strict_date_optional_time||epoch_millis]]];
nested: IllegalArgumentException[failed to parse date field [-9223372036854775808] with format [strict_date_optional_time||epoch_millis]];
nested: DateTimeParseException[Failed to parse with all enclosed parsers]; }
opensearch-ccs-node1 | [2024-09-03T15:05:45,776][ERROR][o.o.t.r.h.ModelValidationActionHandler] [2a2cd14da04d] Failed to create search request for last data point
opensearch-ccs-node1 | org.opensearch.action.search.SearchPhaseExecutionException: all shards failed
opensearch-ccs-node1 | at org.opensearch.action.search.AbstractSearchAsyncAction.onPhaseFailure(AbstractSearchAsyncAction.java:770) [opensearch-3.0.0.jar:3.0.0]
```

Now the call returns:

```
{ "model": { "time_field": { "message": "There isn't enough historical data found with current timefield selected." } } }
```

Signed-off-by: Kaituo Li <[email protected]>
kaituo authored Sep 3, 2024
1 parent afd5da9 commit e47e3c3
Showing 2 changed files with 164 additions and 1 deletion.
8 changes: 7 additions & 1 deletion src/main/java/org/opensearch/timeseries/util/ParseUtils.java
```
@@ -699,7 +699,13 @@ public static Optional<Long> getLatestDataTime(SearchResponse searchResponse) {
             .map(SearchResponse::getAggregations)
             .map(aggs -> aggs.asMap())
             .map(map -> (Max) map.get(CommonName.AGG_NAME_MAX_TIME))
-            .map(agg -> (long) agg.getValue());
+            // Explanation:
+            // - `agg != null`: Ensures the aggregation object is not null.
+            // - `agg.getValue() > 0`: Checks that the value is positive, as the aggregation might return a default negative value
+            // like -9223372036854775808 if the underlying data is null or invalid. This check prevents converting such invalid
+            // values to a timestamp, which would not make sense in the context of time-based data.
+            .filter(agg -> agg != null && agg.getValue() > 0) // Check if the value is valid and positive.
+            .map(agg -> (long) agg.getValue()); // Cast to long if valid
     }
 
     /**
```
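For illustration only, a hedged sketch of how a caller might consume the Optional-based result; the surrounding handler code here is hypothetical and not the plugin's actual implementation:

```
// Hypothetical caller (illustrative only): branch on the Optional instead of using the raw value.
Optional<Long> latestTime = ParseUtils.getLatestDataTime(searchResponse);
if (latestTime.isPresent()) {
    long lastDataPointMillis = latestTime.get(); // guaranteed positive by the new filter
    // ... build the follow-up date-range query from lastDataPointMillis ...
} else {
    // No valid data point found; report a validation message such as
    // "There isn't enough historical data found with current timefield selected."
}
```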
157 changes: 157 additions & 0 deletions src/test/java/org/opensearch/timeseries/util/ParseUtilsTests.java
```
@@ -0,0 +1,157 @@
/*
 * Copyright OpenSearch Contributors
 * SPDX-License-Identifier: Apache-2.0
 */

package org.opensearch.timeseries.util;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.Collections;
import java.util.Optional;

import org.apache.lucene.search.TotalHits;
import org.opensearch.action.search.SearchResponse;
import org.opensearch.action.search.SearchResponseSections;
import org.opensearch.action.search.ShardSearchFailure;
import org.opensearch.search.SearchHit;
import org.opensearch.search.SearchHits;
import org.opensearch.search.aggregations.Aggregations;
import org.opensearch.search.aggregations.metrics.Max;
import org.opensearch.test.OpenSearchTestCase;
import org.opensearch.timeseries.constant.CommonName;

public class ParseUtilsTests extends OpenSearchTestCase {

    public void testGetLatestDataTime_withValidMaxValue() {
        Max maxAggregation = mock(Max.class);
        when(maxAggregation.getValue()).thenReturn(1623840000000.0); // A valid timestamp value

        Aggregations aggregations = new Aggregations(Collections.singletonList(maxAggregation));
        when(maxAggregation.getName()).thenReturn(CommonName.AGG_NAME_MAX_TIME);

        SearchResponseSections sections = new SearchResponseSections(
            new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0),
            aggregations,
            null,
            false,
            null,
            null,
            1
        );
        SearchResponse searchResponse = new SearchResponse(
            sections,
            null,
            0,
            0,
            0,
            0L,
            ShardSearchFailure.EMPTY_ARRAY,
            SearchResponse.Clusters.EMPTY
        );

        Optional<Long> result = ParseUtils.getLatestDataTime(searchResponse);

        assertTrue(result.isPresent());
        assertEquals("got " + result.get(), 1623840000000L, result.get().longValue());
    }

    public void testGetLatestDataTime_withNullValue() {
        Max maxAggregation = mock(Max.class);
        when(maxAggregation.getValue()).thenReturn(Double.NEGATIVE_INFINITY); // Simulating a missing value scenario

        Aggregations aggregations = new Aggregations(Collections.singletonList(maxAggregation));
        when(maxAggregation.getName()).thenReturn(CommonName.AGG_NAME_MAX_TIME);

        SearchResponseSections sections = new SearchResponseSections(
            new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0),
            aggregations,
            null,
            false,
            null,
            null,
            1
        );
        SearchResponse searchResponse = new SearchResponse(
            sections,
            null,
            0,
            0,
            0,
            0L,
            ShardSearchFailure.EMPTY_ARRAY,
            SearchResponse.Clusters.EMPTY
        );

        // Act
        Optional<Long> result = ParseUtils.getLatestDataTime(searchResponse);

        // Assert
        assertTrue(result.isEmpty());
    }

    public void testGetLatestDataTime_withNoAggregations() {
        // Arrange
        SearchResponseSections sections = new SearchResponseSections(
            new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0),
            null, // No aggregations provided
            null,
            false,
            null,
            null,
            1
        );
        SearchResponse searchResponse = new SearchResponse(
            sections,
            null,
            0,
            0,
            0,
            0L,
            ShardSearchFailure.EMPTY_ARRAY,
            SearchResponse.Clusters.EMPTY
        );

        // Act
        Optional<Long> result = ParseUtils.getLatestDataTime(searchResponse);

        // Assert
        assertTrue(result.isEmpty());
    }

    public void testGetLatestDataTime_withNegativeMaxValue() {
        // Arrange
        Max maxAggregation = mock(Max.class);
        when(maxAggregation.getValue()).thenReturn(-9223372036854775808.0); // Invalid negative value

        Aggregations aggregations = new Aggregations(Collections.singletonList(maxAggregation));
        when(maxAggregation.getName()).thenReturn(CommonName.AGG_NAME_MAX_TIME);

        SearchResponseSections sections = new SearchResponseSections(
            new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0),
            aggregations,
            null,
            false,
            null,
            null,
            1
        );
        SearchResponse searchResponse = new SearchResponse(
            sections,
            null,
            0,
            0,
            0,
            0L,
            ShardSearchFailure.EMPTY_ARRAY,
            SearchResponse.Clusters.EMPTY
        );

        // Act
        Optional<Long> result = ParseUtils.getLatestDataTime(searchResponse);

        // Assert
        assertTrue(result.isEmpty());
    }
}
```
