Merge pull request #4729 from gchq/gh-4690_fix_meta_data_source
Gh 4690 fix meta data source
stroomdev66 authored Jan 28, 2025
2 parents e7808bd + 6d68682 commit 4ac0895
Showing 62 changed files with 755 additions and 556 deletions.
@@ -18,7 +18,7 @@

import stroom.analytics.api.AnalyticsService;
import stroom.datasource.api.v2.DataSourceProvider;
import stroom.explorer.api.HasDataSourceDocRefs;
import stroom.explorer.api.IsSpecialExplorerDataSource;
import stroom.job.api.ScheduledJobsBinder;
import stroom.processor.api.ProcessorTaskExecutorBinder;
import stroom.processor.shared.ProcessorType;
@@ -86,7 +86,7 @@ protected void configure() {
.addBinding(AnalyticsSearchProvider.class);
GuiceUtil.buildMultiBinder(binder(), SearchProvider.class)
.addBinding(AnalyticsSearchProvider.class);
GuiceUtil.buildMultiBinder(binder(), HasDataSourceDocRefs.class)
GuiceUtil.buildMultiBinder(binder(), IsSpecialExplorerDataSource.class)
.addBinding(AnalyticsSearchProvider.class);
GuiceUtil.buildMultiBinder(binder(), NodeSearchTaskHandlerProvider.class)
.addBinding(AnalyticsNodeSearchTaskHandlerProvider.class);
@@ -19,7 +19,7 @@
import stroom.datasource.api.v2.FindFieldCriteria;
import stroom.datasource.api.v2.QueryField;
import stroom.docref.DocRef;
import stroom.explorer.api.HasDataSourceDocRefs;
import stroom.explorer.api.IsSpecialExplorerDataSource;
import stroom.expression.api.DateTimeSettings;
import stroom.query.api.v2.ExpressionOperator;
import stroom.query.api.v2.ExpressionUtil;
@@ -45,7 +45,7 @@
import java.util.Optional;
import java.util.Set;

public class AnalyticsSearchProvider implements SearchProvider, HasDataSourceDocRefs {
public class AnalyticsSearchProvider implements SearchProvider, IsSpecialExplorerDataSource {

private final CoprocessorsFactory coprocessorsFactory;
private final ResultStoreFactory resultStoreFactory;
@@ -65,9 +65,9 @@ public AnalyticsSearchProvider(final CoprocessorsFactory coprocessorsFactory,

@Override
public ResultPage<QueryField> getFieldInfo(final FindFieldCriteria criteria) {
if (!getType().equals(criteria.getDataSourceRef().getType())) {
return ResultPage.empty();
}
// if (!getType().equals(criteria.getDataSourceRef().getType())) {
// return ResultPage.empty();
// }
return FieldInfoResultPageBuilder.builder(criteria)
.addAll(AnalyticFields.getFields())
.build();
@@ -79,18 +79,8 @@ public int getFieldCount(final DocRef docRef) {
}

@Override
public Optional<String> fetchDocumentation(final DocRef docRef) {
return Optional.empty();
}

@Override
public DocRef fetchDefaultExtractionPipeline(final DocRef dataSourceRef) {
return null;
}

@Override
public QueryField getTimeField(final DocRef docRef) {
return AnalyticFields.TIME_FIELD;
public Optional<QueryField> getTimeField(final DocRef docRef) {
return Optional.of(AnalyticFields.TIME_FIELD);
}

public ResultStore createResultStore(final SearchRequest searchRequest) {
@@ -146,7 +136,7 @@ public ResultStore createResultStore(final SearchRequest searchRequest) {
* highlighting.
*/
private Set<String> getHighlights(final ExpressionOperator expression,
DateTimeSettings dateTimeSettings) {
final DateTimeSettings dateTimeSettings) {
Set<String> highlights = Collections.emptySet();

// try {
@@ -167,17 +157,12 @@ private Set<String> getHighlights(final ExpressionOperator expression,
}

@Override
public List<DocRef> list() {
public List<DocRef> getDataSourceDocRefs() {
return List.of(AnalyticFields.ANALYTICS_DOC_REF);
}

@Override
public String getType() {
public String getDataSourceType() {
return AnalyticFields.ANALYTICS_DOC_REF.getType();
}

@Override
public List<DocRef> getDataSourceDocRefs() {
return List.of(AnalyticFields.ANALYTICS_DOC_REF);
}
}
@@ -190,7 +190,7 @@ private BiConsumer<AnalyticRuleDoc, DependencyRemapper> createMapper() {
final DataSourceProviderRegistry dataSourceProviderRegistry =
dataSourceProviderRegistryProvider.get();
final Optional<DocRef> optional = dataSourceProviderRegistry
.list()
.getDataSourceDocRefs()
.stream()
.filter(dr -> dr.equals(docRef))
.findAny();
@@ -18,6 +18,7 @@

import stroom.annotation.api.AnnotationCreator;
import stroom.annotation.shared.AnnotationDetail;
import stroom.datasource.api.v2.DataSourceProvider;
import stroom.event.logging.api.ObjectInfoProviderBinder;
import stroom.search.extraction.AnnotationsDecoratorFactory;
import stroom.searchable.api.Searchable;
@@ -42,6 +43,8 @@ protected void configure() {
ObjectInfoProviderBinder.create(binder())
.bind(AnnotationDetail.class, AnnotationEventInfoProvider.class);

GuiceUtil.buildMultiBinder(binder(), DataSourceProvider.class)
.addBinding(AnnotationService.class);
GuiceUtil.buildMultiBinder(binder(), Searchable.class)
.addBinding(AnnotationService.class);
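For context, each GuiceUtil.buildMultiBinder call above contributes a binding to a single injectable Set, which is how the new DataSourceProvider binding for AnnotationService becomes visible to the rest of the system. Below is a standalone sketch of the same pattern in plain Guice; it is not part of this commit, the class names are invented, and the equivalence to GuiceUtil.buildMultiBinder is an assumption about what that helper wraps.

import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.TypeLiteral;
import com.google.inject.multibindings.Multibinder;

import java.util.Set;

public class MultibindingSketch {

    // Stand-in for stroom.datasource.api.v2.DataSourceProvider.
    interface DataSourceProvider {
        String getDataSourceType();
    }

    static class AnnotationsProvider implements DataSourceProvider {
        @Override
        public String getDataSourceType() {
            return "Annotations";
        }
    }

    static class SketchModule extends AbstractModule {
        @Override
        protected void configure() {
            // Assumed to be roughly what GuiceUtil.buildMultiBinder(binder(), ...)
            // .addBinding(...) does under the hood.
            Multibinder.newSetBinder(binder(), DataSourceProvider.class)
                    .addBinding()
                    .to(AnnotationsProvider.class);
        }
    }

    public static void main(final String[] args) {
        final Injector injector = Guice.createInjector(new SketchModule());
        // Every module that adds a binding contributes to the same Set, which a
        // registry can then inject and iterate as a whole.
        final Set<DataSourceProvider> providers =
                injector.getInstance(Key.get(new TypeLiteral<Set<DataSourceProvider>>() { }));
        providers.forEach(p -> System.out.println(p.getDataSourceType()));
    }
}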

@@ -46,13 +46,14 @@

import jakarta.inject.Inject;

import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Optional;

public class AnnotationService implements Searchable, AnnotationCreator, HasUserDependencies {

private static final DocRef ANNOTATIONS_PSEUDO_DOC_REF = new DocRef("Searchable", "Annotations", "Annotations");
private static final DocRef ANNOTATIONS_PSEUDO_DOC_REF = new DocRef("Annotations", "Annotations", "Annotations");

private final AnnotationDao annotationDao;
private final SecurityContext securityContext;
@@ -65,13 +66,21 @@ public class AnnotationService implements Searchable, AnnotationCreator, HasUser
}

@Override
public DocRef getDocRef() {
try {
checkPermission();
return ANNOTATIONS_PSEUDO_DOC_REF;
} catch (final PermissionException e) {
return null;
public String getDataSourceType() {
return ANNOTATIONS_PSEUDO_DOC_REF.getType();
}

@Override
public List<DocRef> getDataSourceDocRefs() {
if (securityContext.hasAppPermission(AppPermission.ANNOTATIONS)) {
return Collections.singletonList(ANNOTATIONS_PSEUDO_DOC_REF);
}
return Collections.emptyList();
}

@Override
public Optional<QueryField> getTimeField(final DocRef docRef) {
return Optional.of(AnnotationFields.UPDATED_ON_FIELD);
}

@Override
@@ -87,16 +96,6 @@ public int getFieldCount(final DocRef docRef) {
return NullSafe.size(AnnotationFields.FIELDS);
}

@Override
public Optional<String> fetchDocumentation(final DocRef docRef) {
return Optional.empty();
}

@Override
public QueryField getTimeField() {
return AnnotationFields.UPDATED_ON_FIELD;
}

@Override
public void search(final ExpressionCriteria criteria,
final FieldIndex fieldIndex,
@@ -197,8 +197,8 @@ private String buildXML(final String[] include, final String[] exclude) {
<query>
<dataSource>
<type>StreamStore</type>
<uuid>0</uuid>
<name>StreamStore</name>
<uuid>StreamStore</uuid>
<name>Stream Store</name>
</dataSource>
<expression>
<children>
@@ -13,7 +13,7 @@
public class IndexShardFields {

public static final DocRef INDEX_SHARDS_PSEUDO_DOC_REF = new DocRef(
"Searchable", "Index Shards", "Index Shards");
"IndexShards", "IndexShards", "Index Shards");

public static final String FIELD_NAME_NODE = "Node";
public static final String FIELD_NAME_INDEX = "Index";
@@ -15,8 +15,8 @@ public class MetaFields {
public static final String STREAM_STORE_TYPE = "StreamStore";
public static final DocRef STREAM_STORE_DOC_REF = DocRef.builder()
.type(STREAM_STORE_TYPE)
.uuid("0")
.name(STREAM_STORE_TYPE)
.uuid(STREAM_STORE_TYPE)
.name("Stream Store")
.build();

public static final String FIELD_ID = "Id";
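This uuid/name change (mirrored in the test XML earlier in the diff) is the core of the meta data source fix: the stream store pseudo data source now uses its type as its uuid and carries a human-readable name. The following small check is illustrative only and not part of this commit; the assumption that DocRef identity hinges on type and uuid is mine.

import stroom.docref.DocRef;

public class StreamStoreDocRefCheck {

    public static void main(final String[] args) {
        // Built the way the removed lines above did it.
        final DocRef before = DocRef.builder()
                .type("StreamStore")
                .uuid("0")
                .name("StreamStore")
                .build();

        // Built the way the replacement lines do it.
        final DocRef after = DocRef.builder()
                .type("StreamStore")
                .uuid("StreamStore")
                .name("Stream Store")
                .build();

        // Assuming DocRef equality is based on type and uuid, anything still
        // holding the old uuid ("0") will not resolve to the new pseudo DocRef.
        System.out.println(before.equals(after)); // expected: false
    }
}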
@@ -11,8 +11,8 @@
public class ReferenceDataFields {

public static final DocRef REF_STORE_PSEUDO_DOC_REF = new DocRef(
"Searchable",
"Reference Data Store",
"ReferenceDataStore",
"ReferenceDataStore",
"Reference Data Store (This Node Only)");
public static final QueryField FEED_NAME_FIELD = QueryField
.builder()
@@ -13,8 +13,8 @@
public class ProcessorTaskFields {

public static final DocRef PROCESSOR_TASK_PSEUDO_DOC_REF = new DocRef(
"Searchable",
"Processor Tasks",
"ProcessorTasks",
"ProcessorTasks",
"Processor Tasks");

private static final List<QueryField> FIELDS = new ArrayList<>();
@@ -4,7 +4,7 @@

import java.util.List;

public interface HasDataSourceDocRefs {
public interface IsSpecialExplorerDataSource {

List<DocRef> getDataSourceDocRefs();
}
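For illustration only (not part of this commit), a minimal implementation of the renamed interface; the class name and DocRef values below are invented, following the type-as-uuid convention the other pseudo DocRefs in this commit adopt.

import stroom.docref.DocRef;
import stroom.explorer.api.IsSpecialExplorerDataSource;

import java.util.List;

public class ExampleSpecialDataSource implements IsSpecialExplorerDataSource {

    // A pseudo DocRef for a data source that has no backing explorer document.
    private static final DocRef EXAMPLE_PSEUDO_DOC_REF =
            new DocRef("ExampleStore", "ExampleStore", "Example Store");

    @Override
    public List<DocRef> getDataSourceDocRefs() {
        return List.of(EXAMPLE_PSEUDO_DOC_REF);
    }
}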
@@ -2,7 +2,7 @@

import stroom.docref.DocRef;
import stroom.explorer.api.ExplorerDecorator;
import stroom.explorer.api.HasDataSourceDocRefs;
import stroom.explorer.api.IsSpecialExplorerDataSource;

import jakarta.inject.Inject;

@@ -13,10 +13,10 @@

public class ExplorerDecoratorImpl implements ExplorerDecorator {

private final Set<HasDataSourceDocRefs> set;
private final Set<IsSpecialExplorerDataSource> set;

@Inject
public ExplorerDecoratorImpl(final Set<HasDataSourceDocRefs> set) {
public ExplorerDecoratorImpl(final Set<IsSpecialExplorerDataSource> set) {
this.set = set;
}

@@ -1,47 +1,61 @@
package stroom.index.impl.db;

import stroom.datasource.api.v2.QueryField;
import stroom.docref.DocRef;
import stroom.entity.shared.ExpressionCriteria;
import stroom.index.impl.IndexShardDao;
import stroom.index.impl.IndexVolumeDao;
import stroom.index.impl.IndexVolumeGroupDao;
import stroom.index.shared.AllPartition;
import stroom.index.shared.IndexShard;
import stroom.index.shared.IndexShardFields;
import stroom.index.shared.IndexShardKey;
import stroom.index.shared.IndexVolume;
import stroom.index.shared.IndexVolumeGroup;
import stroom.index.shared.LuceneIndexDoc;
import stroom.query.language.functions.FieldIndex;
import stroom.util.AuditUtil;
import stroom.util.io.ByteSizeUnit;
import stroom.util.shared.Clearable;

import com.google.inject.Guice;
import com.google.inject.Injector;
import jakarta.inject.Inject;
import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;

import java.nio.file.Path;
import java.time.Instant;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger;

class TestIndexShardDaoImpl {
import static org.assertj.core.api.Assertions.assertThat;

private static IndexVolumeDao indexVolumeDao;
private static IndexVolumeGroupDao indexVolumeGroupDao;
private static IndexShardDao indexShardDao;
private static Path tempDir;
class TestIndexShardDaoImpl {

@BeforeAll
static void beforeAll(@TempDir final Path tempDir) {
@Inject
private IndexVolumeDao indexVolumeDao;
@Inject
private IndexVolumeGroupDao indexVolumeGroupDao;
@Inject
private IndexShardDao indexShardDao;
@Inject
private Set<Clearable> clearables;
private Path tempDir;

@BeforeEach
void beforeEach(@TempDir final Path tempDir) {
final Injector injector = Guice.createInjector(
new IndexDbModule(),
new IndexDaoModule(),
new TestModule());

indexVolumeDao = injector.getInstance(IndexVolumeDao.class);
indexVolumeGroupDao = injector.getInstance(IndexVolumeGroupDao.class);
indexShardDao = injector.getInstance(IndexShardDao.class);
TestIndexShardDaoImpl.tempDir = tempDir;
injector.injectMembers(this);
clearables.forEach(Clearable::clear);
this.tempDir = tempDir;
}

// @Test
@@ -153,4 +167,47 @@ private IndexVolumeGroup createGroup(final String name) {
AuditUtil.stamp(() -> "test", indexVolumeGroup);
return indexVolumeGroupDao.getOrCreate(indexVolumeGroup);
}

@Test
void testSearch() {
// Given
final DocRef index = DocRef.builder()
.uuid(UUID.randomUUID().toString())
.name(TestData.createIndexName())
.type(LuceneIndexDoc.TYPE)
.build();
final String nodeName = TestData.createNodeName();

final String volumeGroupName = TestData.createVolumeGroupName();
final IndexVolumeGroup indexVolumeGroup = createGroup(volumeGroupName);

final IndexVolume indexVolume = createVolume(
nodeName, tempDir.resolve("my_vol1").toString(), indexVolumeGroup);

// When
createGroup(volumeGroupName);
final IndexShardKey indexShardKey = IndexShardKey
.builder()
.indexUuid(index.getUuid())
.partition(AllPartition.INSTANCE)
.build();

indexShardDao.create(indexShardKey, indexVolume, nodeName, "1.0-test");
indexShardDao.create(indexShardKey, indexVolume, nodeName, "1.0-test");

final List<QueryField> fields = IndexShardFields.getFields();
assertThat(fields.size()).isEqualTo(10);

for (final QueryField field : fields) {
final FieldIndex fieldIndex = new FieldIndex();
fieldIndex.create(field.getFldName());

final AtomicInteger count = new AtomicInteger();
indexShardDao.search(new ExpressionCriteria(), fieldIndex, values -> {
count.incrementAndGet();
assertThat(values.length).isEqualTo(1);
});
assertThat(count.get()).isEqualTo(2);
}
}
}