Commit

Merge remote-tracking branch 'origin/master' into ebean-batched-transactions

# Conflicts:
#	metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java
#	metadata-io/src/main/java/com/linkedin/metadata/entity/EntityUtils.java
#	metadata-io/src/test/java/com/linkedin/metadata/entity/EbeanEntityServiceTest.java
#	metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java
#	metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/MappingUtil.java
#	metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/AspectResource.java
#	metadata-service/restli-servlet-impl/src/test/java/com/linkedin/metadata/resources/entity/AspectResourceTest.java
#	metadata-service/services/src/main/java/com/linkedin/metadata/entity/DeleteEntityService.java
david-leifker committed Jul 22, 2023
2 parents e65d540 + 056d361 commit e795eca
Showing 264 changed files with 2,726 additions and 913 deletions.
2 changes: 1 addition & 1 deletion .github/scripts/check_event_type.py
@@ -1,7 +1,7 @@
import sys

java_events = set()
with open("./metadata-io/src/main/java/com/linkedin/metadata/datahubusage/DataHubUsageEventType.java") as java_file:
with open("./metadata-service/services/src/main/java/com/linkedin/metadata/datahubusage/DataHubUsageEventType.java") as java_file:
for line in java_file:
if '''Event"''' not in line:
continue
1 change: 1 addition & 0 deletions build.gradle
@@ -129,6 +129,7 @@ project.ext.externalDependency = [
'jsonSimple': 'com.googlecode.json-simple:json-simple:1.1.1',
'jsonSmart': 'net.minidev:json-smart:2.4.9',
'json': 'org.json:json:20230227',
'junit': 'junit:junit:4.13.2',
'junitJupiterApi': "org.junit.jupiter:junit-jupiter-api:$junitJupiterVersion",
'junitJupiterParams': "org.junit.jupiter:junit-jupiter-params:$junitJupiterVersion",
'junitJupiterEngine': "org.junit.jupiter:junit-jupiter-engine:$junitJupiterVersion",
2 changes: 2 additions & 0 deletions datahub-graphql-core/build.gradle
@@ -7,6 +7,8 @@ dependencies {
compile project(':metadata-service:restli-client')
compile project(':metadata-service:auth-impl')
compile project(':metadata-service:auth-config')
compile project(':metadata-service:configuration')
compile project(':metadata-service:services')
compile project(':metadata-io')
compile project(':metadata-utils')

@@ -37,7 +37,6 @@
import com.linkedin.datahub.graphql.generated.CorpUser;
import com.linkedin.datahub.graphql.generated.CorpUserInfo;
import com.linkedin.datahub.graphql.generated.CorpUserViewsSettings;
import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity;
import com.linkedin.datahub.graphql.generated.Dashboard;
import com.linkedin.datahub.graphql.generated.DashboardInfo;
import com.linkedin.datahub.graphql.generated.DashboardStatsSummary;
@@ -63,9 +62,9 @@
import com.linkedin.datahub.graphql.generated.InstitutionalMemoryMetadata;
import com.linkedin.datahub.graphql.generated.LineageRelationship;
import com.linkedin.datahub.graphql.generated.ListAccessTokenResult;
import com.linkedin.datahub.graphql.generated.ListOwnershipTypesResult;
import com.linkedin.datahub.graphql.generated.ListDomainsResult;
import com.linkedin.datahub.graphql.generated.ListGroupsResult;
import com.linkedin.datahub.graphql.generated.ListOwnershipTypesResult;
import com.linkedin.datahub.graphql.generated.ListQueriesResult;
import com.linkedin.datahub.graphql.generated.ListTestsResult;
import com.linkedin.datahub.graphql.generated.ListViewsResult;
@@ -80,6 +79,7 @@
import com.linkedin.datahub.graphql.generated.MLPrimaryKeyProperties;
import com.linkedin.datahub.graphql.generated.Notebook;
import com.linkedin.datahub.graphql.generated.Owner;
import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity;
import com.linkedin.datahub.graphql.generated.PolicyMatchCriterionValue;
import com.linkedin.datahub.graphql.generated.QueryEntity;
import com.linkedin.datahub.graphql.generated.QuerySubject;
@@ -196,9 +196,9 @@
import com.linkedin.datahub.graphql.resolvers.mutate.UpdateParentNodeResolver;
import com.linkedin.datahub.graphql.resolvers.mutate.UpdateUserSettingResolver;
import com.linkedin.datahub.graphql.resolvers.operation.ReportOperationResolver;
import com.linkedin.datahub.graphql.resolvers.ownership.CreateOwnershipTypeResolver;
import com.linkedin.datahub.graphql.resolvers.ownership.DeleteOwnershipTypeResolver;
import com.linkedin.datahub.graphql.resolvers.ownership.ListOwnershipTypesResolver;
import com.linkedin.datahub.graphql.resolvers.ownership.CreateOwnershipTypeResolver;
import com.linkedin.datahub.graphql.resolvers.ownership.UpdateOwnershipTypeResolver;
import com.linkedin.datahub.graphql.resolvers.policy.DeletePolicyResolver;
import com.linkedin.datahub.graphql.resolvers.policy.GetGrantedPrivilegesResolver;
@@ -303,6 +303,7 @@
import com.linkedin.metadata.config.TestsConfiguration;
import com.linkedin.metadata.config.ViewsConfiguration;
import com.linkedin.metadata.config.VisualConfiguration;
import com.linkedin.metadata.config.telemetry.TelemetryConfiguration;
import com.linkedin.metadata.entity.EntityService;
import com.linkedin.metadata.graph.GraphClient;
import com.linkedin.metadata.graph.SiblingGraphService;
@@ -312,12 +313,11 @@
import com.linkedin.metadata.recommendation.RecommendationsService;
import com.linkedin.metadata.secret.SecretService;
import com.linkedin.metadata.service.DataProductService;
import com.linkedin.metadata.service.LineageService;
import com.linkedin.metadata.service.OwnershipTypeService;
import com.linkedin.metadata.service.QueryService;
import com.linkedin.metadata.service.SettingsService;
import com.linkedin.metadata.service.ViewService;
import com.linkedin.metadata.service.LineageService;
import com.linkedin.metadata.telemetry.TelemetryConfiguration;
import com.linkedin.metadata.timeline.TimelineService;
import com.linkedin.metadata.timeseries.TimeseriesAspectService;
import com.linkedin.metadata.version.GitVersion;
@@ -340,6 +340,7 @@
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.IOUtils;
import org.dataloader.BatchLoaderContextProvider;
@@ -355,6 +356,7 @@
* A {@link GraphQLEngine} configured to provide access to the entities and aspects on the the GMS graph.
*/
@Slf4j
@Getter
public class GmsGraphQLEngine {

private final EntityClient entityClient;
@@ -613,7 +615,7 @@ public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) {
* @param builder
*/
private void configurePluginResolvers(final RuntimeWiring.Builder builder) {
this.graphQLPlugins.forEach(plugin -> plugin.configureExtraResolvers(builder));
this.graphQLPlugins.forEach(plugin -> plugin.configureExtraResolvers(builder, this));
}


@@ -16,6 +16,7 @@
import com.linkedin.metadata.config.TestsConfiguration;
import com.linkedin.metadata.config.ViewsConfiguration;
import com.linkedin.metadata.config.VisualConfiguration;
import com.linkedin.metadata.config.telemetry.TelemetryConfiguration;
import com.linkedin.metadata.entity.EntityService;
import com.linkedin.metadata.graph.GraphClient;
import com.linkedin.metadata.graph.SiblingGraphService;
@@ -28,7 +29,6 @@
import com.linkedin.metadata.service.QueryService;
import com.linkedin.metadata.service.SettingsService;
import com.linkedin.metadata.service.ViewService;
import com.linkedin.metadata.telemetry.TelemetryConfiguration;
import com.linkedin.metadata.timeline.TimelineService;
import com.linkedin.metadata.timeseries.TimeseriesAspectService;
import com.linkedin.metadata.version.GitVersion;
@@ -35,9 +35,10 @@ public interface GmsGraphQLPlugin {

/**
* Optional callback that a plugin can implement to configure any Query, Mutation or Type specific resolvers.
* @param wiringBuilder
* @param wiringBuilder : the builder being used to configure the runtime wiring
* @param baseEngine : a reference to the core engine and its graphql types
*/
default void configureExtraResolvers(final RuntimeWiring.Builder wiringBuilder) {
default void configureExtraResolvers(final RuntimeWiring.Builder wiringBuilder, final GmsGraphQLEngine baseEngine) {

}
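
For illustration only (not part of this commit's diff): a downstream plugin written against the new two-argument callback might look roughly like the sketch below. The plugin class, the "myCustomQuery" field, and MyCustomQueryResolver are hypothetical, DataHub-specific imports are abbreviated, and the other GmsGraphQLPlugin methods are omitted. The point is that the added baseEngine parameter, together with the @Getter now placed on GmsGraphQLEngine, lets a plugin reuse core services such as the entity client when wiring its resolvers.

import graphql.schema.idl.RuntimeWiring;

public class MyGraphQLPlugin implements GmsGraphQLPlugin {
  // Other GmsGraphQLPlugin methods are omitted in this sketch.

  @Override
  public void configureExtraResolvers(final RuntimeWiring.Builder wiringBuilder, final GmsGraphQLEngine baseEngine) {
    // baseEngine exposes the core engine's fields via the newly added Lombok @Getter, so the
    // hypothetical resolver below can be constructed with the shared EntityClient.
    wiringBuilder.type("Query", typeWiring -> typeWiring
        .dataFetcher("myCustomQuery", new MyCustomQueryResolver(baseEngine.getEntityClient())));
  }
}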

@@ -49,6 +49,8 @@ public CompletableFuture<BrowseResultsV2> get(DataFetchingEnvironment environmen
final int start = input.getStart() != null ? input.getStart() : DEFAULT_START;
final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT;
final String query = input.getQuery() != null ? input.getQuery() : "*";
// escape forward slash since it is a reserved character in Elasticsearch
final String sanitizedQuery = ResolverUtils.escapeForwardSlash(query);

return CompletableFuture.supplyAsync(() -> {
try {
@@ -64,7 +66,7 @@ public CompletableFuture<BrowseResultsV2> get(DataFetchingEnvironment environmen
maybeResolvedView != null
? SearchUtils.combineFilters(filter, maybeResolvedView.getDefinition().getFilter())
: filter,
query,
sanitizedQuery,
start,
count,
context.getAuthentication()
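
For context on the change above, and purely as illustration (this is not the actual ResolverUtils code): an escape helper of this kind typically backslash-escapes the reserved character so Elasticsearch treats it literally rather than as query syntax. A minimal sketch, assuming a single backslash escape is sufficient:

public static String escapeForwardSlash(String input) {
  // "/" is reserved in Elasticsearch query syntax; escape it so it is matched literally.
  return input.contains("/") ? input.replace("/", "\\/") : input;
}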
@@ -26,8 +26,8 @@
import com.linkedin.metadata.config.IngestionConfiguration;
import com.linkedin.metadata.config.TestsConfiguration;
import com.linkedin.metadata.config.ViewsConfiguration;
import com.linkedin.metadata.telemetry.TelemetryConfiguration;
import com.linkedin.metadata.config.VisualConfiguration;
import com.linkedin.metadata.config.telemetry.TelemetryConfiguration;
import com.linkedin.metadata.version.GitVersion;
import graphql.schema.DataFetcher;
import graphql.schema.DataFetchingEnvironment;
@@ -1,5 +1,6 @@
package com.linkedin.datahub.graphql.resolvers.ingest.execution;

import com.linkedin.metadata.config.IngestionConfiguration;
import com.linkedin.common.urn.Urn;
import com.linkedin.data.template.StringMap;
import com.linkedin.datahub.graphql.QueryContext;
@@ -9,7 +10,6 @@
import com.linkedin.entity.client.EntityClient;
import com.linkedin.execution.ExecutionRequestInput;
import com.linkedin.execution.ExecutionRequestSource;
import com.linkedin.metadata.config.IngestionConfiguration;
import com.linkedin.metadata.key.ExecutionRequestKey;
import com.linkedin.metadata.utils.EntityKeyUtils;
import com.linkedin.metadata.utils.IngestionUtils;
@@ -136,7 +136,8 @@ public static void addTermsToResource(
) throws URISyntaxException {
if (subResource == null || subResource.equals("")) {
com.linkedin.common.GlossaryTerms terms =
(com.linkedin.common.GlossaryTerms) getAspectFromEntity(resourceUrn.toString(), GLOSSARY_TERM_ASPECT_NAME, entityService, new GlossaryTerms());
(com.linkedin.common.GlossaryTerms) getAspectFromEntity(resourceUrn.toString(), GLOSSARY_TERM_ASPECT_NAME,
entityService, new GlossaryTerms());
terms.setAuditStamp(getAuditStamp(actor));

if (!terms.hasTerms()) {
@@ -321,7 +322,8 @@ private static MetadataChangeProposal buildRemoveTagsToEntityProposal(
EntityService entityService
) {
com.linkedin.common.GlobalTags tags =
(com.linkedin.common.GlobalTags) getAspectFromEntity(resource.getResourceUrn(), TAGS_ASPECT_NAME, entityService, new GlobalTags());
(com.linkedin.common.GlobalTags) getAspectFromEntity(resource.getResourceUrn(), TAGS_ASPECT_NAME,
entityService, new GlobalTags());

if (!tags.hasTags()) {
tags.setTags(new TagAssociationArray());
@@ -358,7 +360,8 @@ private static MetadataChangeProposal buildAddTagsToEntityProposal(
EntityService entityService
) throws URISyntaxException {
com.linkedin.common.GlobalTags tags =
(com.linkedin.common.GlobalTags) getAspectFromEntity(resource.getResourceUrn(), TAGS_ASPECT_NAME, entityService, new GlobalTags());
(com.linkedin.common.GlobalTags) getAspectFromEntity(resource.getResourceUrn(), TAGS_ASPECT_NAME,
entityService, new GlobalTags());

if (!tags.hasTags()) {
tags.setTags(new TagAssociationArray());
@@ -450,7 +453,8 @@ private static MetadataChangeProposal buildAddTermsToEntityProposal(
EntityService entityService
) throws URISyntaxException {
com.linkedin.common.GlossaryTerms terms =
(com.linkedin.common.GlossaryTerms) getAspectFromEntity(resource.getResourceUrn(), GLOSSARY_TERM_ASPECT_NAME, entityService, new GlossaryTerms());
(com.linkedin.common.GlossaryTerms) getAspectFromEntity(resource.getResourceUrn(), GLOSSARY_TERM_ASPECT_NAME,
entityService, new GlossaryTerms());
terms.setAuditStamp(getAuditStamp(actor));

if (!terms.hasTerms()) {
@@ -73,8 +73,7 @@ public static void removeOwnersFromResources(
private static MetadataChangeProposal buildAddOwnersProposal(List<OwnerInput> owners, Urn resourceUrn, Urn actor, EntityService entityService) {
Ownership ownershipAspect = (Ownership) getAspectFromEntity(
resourceUrn.toString(),
Constants.OWNERSHIP_ASPECT_NAME,
entityService,
Constants.OWNERSHIP_ASPECT_NAME, entityService,
new Ownership());
for (OwnerInput input : owners) {
addOwner(ownershipAspect, UrnUtils.getUrn(input.getOwnerUrn()), input.getType(), UrnUtils.getUrn(input.getOwnershipTypeUrn()));
@@ -33,7 +33,6 @@
import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument;
import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES;
import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.resolveView;
import static com.linkedin.datahub.graphql.types.mappers.MapperUtils.*;


@Slf4j
@@ -1,6 +1,7 @@
package com.linkedin.datahub.graphql.resolvers.ingest.execution;

import com.datahub.authentication.Authentication;
import com.linkedin.metadata.config.IngestionConfiguration;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.linkedin.datahub.graphql.QueryContext;
@@ -11,7 +12,6 @@
import com.linkedin.entity.EnvelopedAspectMap;
import com.linkedin.entity.client.EntityClient;
import com.linkedin.metadata.Constants;
import com.linkedin.metadata.config.IngestionConfiguration;
import com.linkedin.mxe.MetadataChangeProposal;
import com.linkedin.r2.RemoteInvocationException;
import graphql.schema.DataFetchingEnvironment;
@@ -1,10 +1,10 @@
package com.linkedin.datahub.graphql.resolvers.ingest.execution;

import com.datahub.authentication.Authentication;
import com.linkedin.metadata.config.IngestionConfiguration;
import com.linkedin.datahub.graphql.QueryContext;
import com.linkedin.datahub.graphql.generated.CreateTestConnectionRequestInput;
import com.linkedin.entity.client.EntityClient;
import com.linkedin.metadata.config.IngestionConfiguration;
import com.linkedin.mxe.MetadataChangeProposal;
import graphql.schema.DataFetchingEnvironment;
import org.mockito.Mockito;
1 change: 1 addition & 0 deletions datahub-upgrade/build.gradle
@@ -15,6 +15,7 @@ dependencies {
compile project(':metadata-io')
compile project(':metadata-service:factories')
compile project(':metadata-service:restli-client')
compile project(':metadata-service:configuration')
implementation externalDependency.charle

compile externalDependency.javaxInject
@@ -9,11 +9,11 @@
import com.linkedin.datahub.upgrade.UpgradeStep;
import com.linkedin.datahub.upgrade.UpgradeStepResult;
import com.linkedin.metadata.Constants;
import com.linkedin.metadata.entity.EntityService;
import com.linkedin.metadata.models.AspectSpec;
import com.linkedin.metadata.models.registry.EntityRegistry;
import com.linkedin.metadata.utils.PegasusUtils;
import com.datahub.util.RecordUtils;
import com.linkedin.metadata.entity.EntityService;
import com.linkedin.metadata.entity.ebean.EbeanAspectV1;
import com.linkedin.metadata.entity.ebean.EbeanAspectV2;
import com.linkedin.metadata.models.EntitySpec;
@@ -33,8 +33,7 @@ public NoCodeUpgrade(
final Authentication systemAuthentication,
final RestliEntityClient entityClient) {
_steps = buildUpgradeSteps(
server,
entityService,
server, entityService,
entityRegistry,
systemAuthentication,
entityClient);
@@ -1,11 +1,11 @@
package com.linkedin.datahub.upgrade.system.elasticsearch.steps;

import com.linkedin.metadata.config.search.ElasticSearchConfiguration;
import com.linkedin.datahub.upgrade.UpgradeContext;
import com.linkedin.datahub.upgrade.UpgradeStep;
import com.linkedin.datahub.upgrade.UpgradeStepResult;
import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult;
import com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils;
import com.linkedin.metadata.config.search.ElasticSearchConfiguration;
import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder;
import com.linkedin.metadata.shared.ElasticSearchIndexed;
import lombok.extern.slf4j.Slf4j;
@@ -22,7 +22,7 @@ public class UpgradeCliApplicationTestConfiguration {
private EbeanServer ebeanServer;

@MockBean
private EntityService entityService;
private EntityService _entityService;

@MockBean
private SearchService searchService;
21 changes: 17 additions & 4 deletions datahub-web-react/src/app/home/AcrylDemoBanner.tsx
@@ -33,13 +33,17 @@ const StyledLink = styled(Link)`
font-weight: 700;
`;

const TextContent = styled.div`
max-width: 1025px;
`;

export default function AcrylDemoBanner() {
return (
<BannerWrapper>
<Logo src={AcrylLogo} />
<TextWrapper>
<Title>Schedule a Demo of Managed Datahub</Title>
<span>
<Title>Schedule a Demo of Managed DataHub</Title>
<TextContent>
DataHub is already the industry&apos;s #1 Open Source Data Catalog.{' '}
<StyledLink
href="https://www.acryldata.io/datahub-sign-up"
@@ -48,8 +52,17 @@
>
Schedule a demo
</StyledLink>{' '}
of Acryl Cloud to see the advanced features that take it to the next level!
</span>
of Acryl DataHub to see the advanced features that take it to the next level or purchase Acryl Cloud
on{' '}
<StyledLink
href="https://aws.amazon.com/marketplace/pp/prodview-ratzv4k453pck?sr=0-1&ref_=beagle&applicationId=AWSMPContessa"
target="_blank"
rel="noopener noreferrer"
>
AWS Marketplace
</StyledLink>
!
</TextContent>
</TextWrapper>
</BannerWrapper>
);
8 changes: 5 additions & 3 deletions docker/kafka-setup/Dockerfile
@@ -5,7 +5,7 @@ ARG MAVEN_REPO="https://repo1.maven.org/maven2"
ARG SNAKEYAML_VERSION="2.0"

RUN rm /usr/share/java/cp-base-new/snakeyaml-*.jar \
&& wget -P /usr/share/java/cp-base-new $MAVEN_REPO/org/yaml/snakeyaml/$SNAKEYAML_VERSION/snakeyaml-$SNAKEYAML_VERSION.jar
&& wget -P /usr/share/java/cp-base-new $MAVEN_REPO/org/yaml/snakeyaml/$SNAKEYAML_VERSION/snakeyaml-$SNAKEYAML_VERSION.jar

# Based on https://github.com/blacktop's alpine kafka build
FROM python:3-alpine
@@ -17,7 +17,7 @@ ENV SCALA_VERSION 2.13
ENV CUB_CLASSPATH='"/usr/share/java/cp-base-new/*"'

# Confluent Docker Utils Version (Namely the tag or branch to grab from git to install)
ARG PYTHON_CONFLUENT_DOCKER_UTILS_VERSION="v0.0.58"
ARG PYTHON_CONFLUENT_DOCKER_UTILS_VERSION="v0.0.60"

# This can be overriden for an offline/air-gapped builds
ARG PYTHON_CONFLUENT_DOCKER_UTILS_INSTALL_SPEC="git+https://github.com/confluentinc/confluent-docker-utils@${PYTHON_CONFLUENT_DOCKER_UTILS_VERSION}"
@@ -36,7 +36,9 @@ RUN mkdir -p /opt \
&& adduser -DH -s /sbin/nologin kafka \
&& chown -R kafka: /opt/kafka \
&& echo "===> Installing python packages ..." \
&& pip install --no-cache-dir jinja2 requests \
&& pip install --no-cache-dir --upgrade pip wheel setuptools \
&& pip install jinja2 requests \
&& pip install "Cython<3.0" "PyYAML<6" --no-build-isolation \
&& pip install --prefer-binary --prefix=/usr/local --upgrade "${PYTHON_CONFLUENT_DOCKER_UTILS_INSTALL_SPEC}" \
&& rm -rf /tmp/* \
&& apk del --purge .build-deps
1 change: 1 addition & 0 deletions docker/quickstart/generate_and_compare.sh
@@ -8,5 +8,6 @@ set -euxo pipefail
python3 -m venv venv
source venv/bin/activate

pip install --upgrade pip wheel setuptools
pip install -r requirements.txt
python generate_docker_quickstart.py check-all