Skip to content

Commit

Permalink
feat(ui) Update auto-complete functionality and design (datahub-proje…
Browse files Browse the repository at this point in the history
…ct#7515)

Adds in new styles for auto-complete as well as the new functionality around quick filters.
  • Loading branch information
chriscollins3456 authored Mar 27, 2023
1 parent 279f38a commit ba6426e
Show file tree
Hide file tree
Showing 67 changed files with 1,881 additions and 261 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -79,6 +79,7 @@
import com.linkedin.datahub.graphql.generated.PolicyMatchCriterionValue;
import com.linkedin.datahub.graphql.generated.QueryEntity;
import com.linkedin.datahub.graphql.generated.QuerySubject;
import com.linkedin.datahub.graphql.generated.QuickFilter;
import com.linkedin.datahub.graphql.generated.RecommendationContent;
import com.linkedin.datahub.graphql.generated.SchemaFieldEntity;
import com.linkedin.datahub.graphql.generated.SearchAcrossLineageResult;
Expand Down Expand Up @@ -204,6 +205,7 @@
import com.linkedin.datahub.graphql.resolvers.role.ListRolesResolver;
import com.linkedin.datahub.graphql.resolvers.search.AutoCompleteForMultipleResolver;
import com.linkedin.datahub.graphql.resolvers.search.AutoCompleteResolver;
import com.linkedin.datahub.graphql.resolvers.search.GetQuickFiltersResolver;
import com.linkedin.datahub.graphql.resolvers.search.ScrollAcrossEntitiesResolver;
import com.linkedin.datahub.graphql.resolvers.search.ScrollAcrossLineageResolver;
import com.linkedin.datahub.graphql.resolvers.search.SearchAcrossEntitiesResolver;
Expand Down Expand Up @@ -743,6 +745,7 @@ private void configureQueryResolvers(final RuntimeWiring.Builder builder) {
.dataFetcher("listGlobalViews", new ListGlobalViewsResolver(this.entityClient))
.dataFetcher("globalViewsSettings", new GlobalViewsSettingsResolver(this.settingsService))
.dataFetcher("listQueries", new ListQueriesResolver(this.entityClient))
.dataFetcher("getQuickFilters", new GetQuickFiltersResolver(this.entityClient, this.viewService))
);
}

Expand Down Expand Up @@ -965,6 +968,10 @@ private void configureGenericEntityResolvers(final RuntimeWiring.Builder builder
(env) -> ((ListTestsResult) env.getSource()).getTests().stream()
.map(Test::getUrn)
.collect(Collectors.toList())))
)
.type("QuickFilter", typeWiring -> typeWiring
.dataFetcher("entity", new EntityTypeResolver(entityTypes,
(env) -> ((QuickFilter) env.getSource()).getEntity()))
);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,12 +41,12 @@ public AutoCompleteForMultipleResolver(@Nonnull final List<SearchableEntityType<
public CompletableFuture<AutoCompleteMultipleResults> get(DataFetchingEnvironment environment) {
final AutoCompleteMultipleInput input = bindArgument(environment.getArgument("input"), AutoCompleteMultipleInput.class);

// escape forward slash since it is a reserved character in Elasticsearch
final String sanitizedQuery = ResolverUtils.escapeForwardSlash(input.getQuery());
if (isBlank(sanitizedQuery)) {
if (isBlank(input.getQuery())) {
_logger.error("'query' parameter was null or empty");
throw new ValidationException("'query' parameter can not be null or empty");
}
// escape forward slash since it is a reserved character in Elasticsearch
final String sanitizedQuery = ResolverUtils.escapeForwardSlash(input.getQuery());

List<EntityType> types = input.getTypes();
if (types != null && types.size() > 0) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
import com.linkedin.datahub.graphql.generated.AutoCompleteResults;
import com.linkedin.datahub.graphql.generated.EntityType;
import com.linkedin.datahub.graphql.resolvers.ResolverUtils;
import com.linkedin.metadata.query.filter.Filter;
import graphql.schema.DataFetcher;
import graphql.schema.DataFetchingEnvironment;

Expand Down Expand Up @@ -49,6 +50,7 @@ public CompletableFuture<AutoCompleteResults> get(DataFetchingEnvironment enviro
throw new ValidationException("'query' parameter can not be null or empty");
}

final Filter filter = ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters());
final int limit = input.getLimit() != null ? input.getLimit() : DEFAULT_LIMIT;
return CompletableFuture.supplyAsync(() -> {
try {
Expand All @@ -62,7 +64,7 @@ public CompletableFuture<AutoCompleteResults> get(DataFetchingEnvironment enviro
return _typeToEntity.get(input.getType()).autoComplete(
sanitizedQuery,
input.getField(),
input.getFilters(),
filter,
limit,
environment.getContext()
);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,9 @@
import com.linkedin.datahub.graphql.generated.AutoCompleteMultipleResults;
import com.linkedin.datahub.graphql.generated.AutoCompleteResultForEntity;
import com.linkedin.datahub.graphql.generated.AutoCompleteResults;
import com.linkedin.datahub.graphql.resolvers.ResolverUtils;
import com.linkedin.datahub.graphql.types.SearchableEntityType;
import com.linkedin.metadata.query.filter.Filter;
import graphql.schema.DataFetchingEnvironment;
import java.util.ArrayList;
import java.util.Collections;
Expand All @@ -31,11 +33,12 @@ public static CompletableFuture<AutoCompleteMultipleResults> batchGetAutocomplet
final int limit = input.getLimit() != null ? input.getLimit() : DEFAULT_LIMIT;

final List<CompletableFuture<AutoCompleteResultForEntity>> autoCompletesFuture = entities.stream().map(entity -> CompletableFuture.supplyAsync(() -> {
final Filter filter = ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters());
try {
final AutoCompleteResults searchResult = entity.autoComplete(
sanitizedQuery,
input.getField(),
input.getFilters(),
filter,
limit,
environment.getContext()
);
Expand All @@ -49,7 +52,7 @@ public static CompletableFuture<AutoCompleteMultipleResults> batchGetAutocomplet
+ String.format("field %s, query %s, filters: %s, limit: %s",
input.getField(),
input.getQuery(),
input.getFilters(),
filter,
input.getLimit()), e);
return new AutoCompleteResultForEntity(entity.type(), Collections.emptyList(), Collections.emptyList());
}
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,177 @@
package com.linkedin.datahub.graphql.resolvers.search;

import com.datahub.authentication.Authentication;
import com.linkedin.common.urn.UrnUtils;
import com.linkedin.datahub.graphql.generated.Entity;
import com.linkedin.datahub.graphql.generated.GetQuickFiltersInput;
import com.linkedin.datahub.graphql.generated.GetQuickFiltersResult;
import com.linkedin.datahub.graphql.generated.QuickFilter;
import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper;
import com.linkedin.datahub.graphql.resolvers.ResolverUtils;
import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper;
import com.linkedin.entity.client.EntityClient;
import com.linkedin.metadata.search.AggregationMetadata;
import com.linkedin.metadata.search.AggregationMetadataArray;
import com.linkedin.metadata.search.FilterValue;
import com.linkedin.metadata.search.SearchResult;
import com.linkedin.metadata.service.ViewService;
import com.linkedin.view.DataHubViewInfo;
import graphql.schema.DataFetcher;
import graphql.schema.DataFetchingEnvironment;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;

import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;

import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument;
import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES;
import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.resolveView;

@Slf4j
@RequiredArgsConstructor
public class GetQuickFiltersResolver implements DataFetcher<CompletableFuture<GetQuickFiltersResult>> {

  private final EntityClient _entityClient;
  private final ViewService _viewService;

  // Aggregation facet names returned by search metadata.
  private static final String PLATFORM = "platform";
  private static final String ENTITY = "entity";
  // Caps on how many quick filters of each category we surface.
  private static final int PLATFORM_COUNT = 5;
  private static final int SOURCE_ENTITY_COUNT = 3;
  private static final int DATAHUB_ENTITY_COUNT = 2;

  /**
   * Resolves the {@code getQuickFilters} query. Runs a wildcard search (optionally scoped
   * by a DataHub view from the input) and derives quick filters from the resulting
   * "platform" and "entity" aggregations.
   *
   * @param environment GraphQL environment carrying the {@code input} argument
   * @return future resolving to the computed {@link GetQuickFiltersResult}
   */
  public CompletableFuture<GetQuickFiltersResult> get(final DataFetchingEnvironment environment) throws Exception {
    final GetQuickFiltersInput input = bindArgument(environment.getArgument("input"), GetQuickFiltersInput.class);

    return CompletableFuture.supplyAsync(() -> {
      final GetQuickFiltersResult result = new GetQuickFiltersResult();
      final List<QuickFilter> quickFilters = new ArrayList<>();

      try {
        final SearchResult searchResult = getSearchResults(ResolverUtils.getAuthentication(environment), input);
        final AggregationMetadataArray aggregations = searchResult.getMetadata().getAggregations();

        quickFilters.addAll(getPlatformQuickFilters(aggregations));
        quickFilters.addAll(getEntityTypeQuickFilters(aggregations));
      } catch (Exception e) {
        log.error("Failed getting quick filters", e);
        // NOTE: message previously contained a duplicated word ("to to")
        throw new RuntimeException("Failed to get quick filters", e);
      }

      result.setQuickFilters(quickFilters);
      return result;
    });
  }

  /**
   * Do a star search with view filter applied to get info about all data in this instance.
   * Results themselves are discarded (count 0); only the aggregation metadata is used.
   */
  private SearchResult getSearchResults(@Nonnull final Authentication authentication, @Nonnull final GetQuickFiltersInput input) throws Exception {
    final DataHubViewInfo maybeResolvedView = (input.getViewUrn() != null)
        ? resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), authentication)
        : null;
    final List<String> entityNames = SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList());

    return _entityClient.searchAcrossEntities(
        maybeResolvedView != null
            ? SearchUtils.intersectEntityTypes(entityNames, maybeResolvedView.getDefinition().getEntityTypes())
            : entityNames,
        "*",
        maybeResolvedView != null
            ? SearchUtils.combineFilters(null, maybeResolvedView.getDefinition().getFilter())
            : null,
        0,
        0, // we only need aggregations, not hits
        null,
        authentication);
  }

  /**
   * Get platforms and their counts from the aggregations array, keep the top
   * {@link #PLATFORM_COUNT} by descending facet count (ignoring empty facets), and
   * return them as quick filters sorted alphabetically by value.
   */
  private List<QuickFilter> getPlatformQuickFilters(@Nonnull final AggregationMetadataArray aggregations) {
    final Optional<AggregationMetadata> platformAggregations =
        aggregations.stream().filter(agg -> agg.getName().equals(PLATFORM)).findFirst();
    if (!platformAggregations.isPresent()) {
      return new ArrayList<>();
    }

    // Top N platforms by count; `.reversed()` avoids the overflow-prone negated-key comparator.
    final List<QuickFilter> platforms = platformAggregations.get().getFilterValues().stream()
        .sorted(Comparator.comparingLong(FilterValue::getFacetCount).reversed())
        .filter(platformFilter -> platformFilter.getFacetCount() > 0)
        .limit(PLATFORM_COUNT)
        .map(platformFilter -> mapQuickFilter(PLATFORM, platformFilter))
        .collect(Collectors.toList());

    // Return platforms sorted alphabetically by their value for stable display order.
    return platforms.stream().sorted(Comparator.comparing(QuickFilter::getValue)).collect(Collectors.toList());
  }

  /**
   * Gets entity type quick filters from search aggregations. First, get source entity type
   * quick filters from a prioritized list. Do the same for DataHub entity types.
   */
  private List<QuickFilter> getEntityTypeQuickFilters(@Nonnull final AggregationMetadataArray aggregations) {
    final List<QuickFilter> entityTypes = new ArrayList<>();
    final Optional<AggregationMetadata> entityAggregations =
        aggregations.stream().filter(agg -> agg.getName().equals(ENTITY)).findFirst();

    if (entityAggregations.isPresent()) {
      entityTypes.addAll(
          getQuickFiltersFromList(SearchUtils.PRIORITIZED_SOURCE_ENTITY_TYPES, SOURCE_ENTITY_COUNT, entityAggregations.get()));
      entityTypes.addAll(
          getQuickFiltersFromList(SearchUtils.PRIORITIZED_DATAHUB_ENTITY_TYPES, DATAHUB_ENTITY_COUNT, entityAggregations.get()));
    }
    return entityTypes;
  }

  /**
   * Create a quick filters list by looping over the prioritized list (in order) and adding
   * filters that exist with a non-zero count until we reach {@code maxListSize}.
   */
  private List<QuickFilter> getQuickFiltersFromList(
      @Nonnull final List<String> prioritizedList,
      final int maxListSize,
      @Nonnull final AggregationMetadata entityAggregations
  ) {
    final List<QuickFilter> entityTypes = new ArrayList<>();
    for (final String entityType : prioritizedList) {
      if (entityTypes.size() >= maxListSize) {
        break; // cap reached — no need to scan the rest of the prioritized list
      }
      final Optional<FilterValue> entityFilter = entityAggregations.getFilterValues().stream()
          .filter(val -> val.getValue().equals(entityType))
          .findFirst();
      if (entityFilter.isPresent() && entityFilter.get().getFacetCount() > 0) {
        entityTypes.add(mapQuickFilter(ENTITY, entityFilter.get()));
      }
    }
    return entityTypes;
  }

  /** Maps one aggregation filter value to a GraphQL {@link QuickFilter}. */
  private QuickFilter mapQuickFilter(@Nonnull final String field, @Nonnull final FilterValue filterValue) {
    final boolean isEntityTypeFilter = field.equals(ENTITY);
    final QuickFilter quickFilter = new QuickFilter();
    quickFilter.setField(field);
    quickFilter.setValue(convertFilterValue(filterValue.getValue(), isEntityTypeFilter));
    if (filterValue.getEntity() != null) {
      final Entity entity = UrnToEntityMapper.map(filterValue.getEntity());
      quickFilter.setEntity(entity);
    }
    return quickFilter;
  }

  /**
   * If we're working with an entity type filter, we need to convert the value to an EntityType.
   * Otherwise the raw filter value is returned unchanged.
   */
  public static String convertFilterValue(String filterValue, boolean isEntityType) {
    if (isEntityType) {
      return EntityTypeMapper.getType(filterValue).toString();
    }
    return filterValue;
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -17,11 +17,28 @@
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import lombok.extern.slf4j.Slf4j;
import org.codehaus.plexus.util.CollectionUtils;

import static com.linkedin.metadata.Constants.CHART_ENTITY_NAME;
import static com.linkedin.metadata.Constants.CONTAINER_ENTITY_NAME;
import static com.linkedin.metadata.Constants.CORP_GROUP_ENTITY_NAME;
import static com.linkedin.metadata.Constants.CORP_USER_ENTITY_NAME;
import static com.linkedin.metadata.Constants.DASHBOARD_ENTITY_NAME;
import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME;
import static com.linkedin.metadata.Constants.DATA_FLOW_ENTITY_NAME;
import static com.linkedin.metadata.Constants.DATA_JOB_ENTITY_NAME;
import static com.linkedin.metadata.Constants.DOMAIN_ENTITY_NAME;
import static com.linkedin.metadata.Constants.GLOSSARY_TERM_ENTITY_NAME;
import static com.linkedin.metadata.Constants.ML_FEATURE_ENTITY_NAME;
import static com.linkedin.metadata.Constants.ML_FEATURE_TABLE_ENTITY_NAME;
import static com.linkedin.metadata.Constants.ML_MODEL_ENTITY_NAME;
import static com.linkedin.metadata.Constants.ML_MODEL_GROUP_ENTITY_NAME;
import static com.linkedin.metadata.Constants.ML_PRIMARY_KEY_ENTITY_NAME;


@Slf4j
public class SearchUtils {
Expand Down Expand Up @@ -72,6 +89,32 @@ private SearchUtils() {
EntityType.CORP_GROUP,
EntityType.NOTEBOOK);

/**
 * A prioritized list of source entity types used to generate quick filters.
 * Order matters: consumers walk this list front-to-back when selecting filters,
 * so earlier entries win when only a limited number of quick filters is shown.
 * Values are lower-cased to match the casing of the "entity" search aggregation.
 */
public static final List<String> PRIORITIZED_SOURCE_ENTITY_TYPES = Stream.of(
DATASET_ENTITY_NAME,
DASHBOARD_ENTITY_NAME,
DATA_FLOW_ENTITY_NAME,
DATA_JOB_ENTITY_NAME,
CHART_ENTITY_NAME,
CONTAINER_ENTITY_NAME,
ML_MODEL_ENTITY_NAME,
ML_MODEL_GROUP_ENTITY_NAME,
ML_FEATURE_ENTITY_NAME,
ML_FEATURE_TABLE_ENTITY_NAME,
ML_PRIMARY_KEY_ENTITY_NAME
).map(String::toLowerCase).collect(Collectors.toList());

/**
 * A prioritized list of DataHub-native entity types used to generate quick filters.
 * Order matters: consumers walk this list front-to-back when selecting filters,
 * so earlier entries win when only a limited number of quick filters is shown.
 * Values are lower-cased to match the casing of the "entity" search aggregation.
 */
public static final List<String> PRIORITIZED_DATAHUB_ENTITY_TYPES = Stream.of(
DOMAIN_ENTITY_NAME,
GLOSSARY_TERM_ENTITY_NAME,
CORP_GROUP_ENTITY_NAME,
CORP_USER_ENTITY_NAME
).map(String::toLowerCase).collect(Collectors.toList());

/**
* Combines two {@link Filter} instances in a conjunction and returns a new instance of {@link Filter}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import com.linkedin.datahub.graphql.generated.Entity;
import com.linkedin.datahub.graphql.generated.FacetFilterInput;
import com.linkedin.datahub.graphql.generated.SearchResults;
import com.linkedin.metadata.query.filter.Filter;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;
Expand Down Expand Up @@ -49,7 +50,7 @@ SearchResults search(@Nonnull String query,
*/
AutoCompleteResults autoComplete(@Nonnull String query,
@Nullable String field,
@Nullable List<FacetFilterInput> filters,
@Nullable Filter filters,
int limit,
@Nonnull final QueryContext context) throws Exception;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@
import com.linkedin.metadata.authorization.PoliciesConfig;
import com.linkedin.metadata.browse.BrowseResult;
import com.linkedin.metadata.query.AutoCompleteResult;
import com.linkedin.metadata.query.filter.Filter;
import com.linkedin.metadata.query.SearchFlags;
import com.linkedin.metadata.search.SearchResult;
import com.linkedin.mxe.MetadataChangeProposal;
Expand Down Expand Up @@ -152,14 +153,13 @@ public SearchResults search(@Nonnull String query,
@Override
public AutoCompleteResults autoComplete(@Nonnull String query,
@Nullable String field,
@Nullable List<FacetFilterInput> filters,
@Nullable Filter filters,
int limit,
@Nonnull QueryContext context) throws Exception {
final Map<String, String> facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS);
final AutoCompleteResult result = _entityClient.autoComplete(
"chart",
query,
facetFilters,
filters,
limit,
context.getAuthentication());
return AutoCompleteResultsMapper.map(result);
Expand Down
Loading

0 comments on commit ba6426e

Please sign in to comment.